xref: /openbmc/qemu/target/sparc/translate.c (revision 1ea9c62a5eb32d910f3ff9e7bbbdfb9f875a600f)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
48 /* global register indexes */
49 static TCGv_ptr cpu_regwptr;
50 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
51 static TCGv_i32 cpu_cc_op;
52 static TCGv_i32 cpu_psr;
53 static TCGv cpu_fsr, cpu_pc, cpu_npc;
54 static TCGv cpu_regs[32];
55 static TCGv cpu_y;
56 #ifndef CONFIG_USER_ONLY
57 static TCGv cpu_tbr;
58 #endif
59 static TCGv cpu_cond;
60 #ifdef TARGET_SPARC64
61 static TCGv_i32 cpu_xcc, cpu_fprs;
62 static TCGv cpu_gsr;
63 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
64 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
65 #else
66 static TCGv cpu_wim;
67 #endif
68 /* Floating point registers */
69 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
71 typedef struct DisasDelayException {
72     struct DisasDelayException *next;
73     TCGLabel *lab;
74     TCGv_i32 excp;
75     /* Saved state at parent insn. */
76     target_ulong pc;
77     target_ulong npc;
78 } DisasDelayException;
79 
80 typedef struct DisasContext {
81     DisasContextBase base;
82     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
83     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
84     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
85     int mem_idx;
86     bool fpu_enabled;
87     bool address_mask_32bit;
88 #ifndef CONFIG_USER_ONLY
89     bool supervisor;
90 #ifdef TARGET_SPARC64
91     bool hypervisor;
92 #endif
93 #endif
94 
95     uint32_t cc_op;  /* current CC operation */
96     sparc_def_t *def;
97 #ifdef TARGET_SPARC64
98     int fprs_dirty;
99     int asi;
100 #endif
101     DisasDelayException *delay_excp_list;
102 } DisasContext;
103 
104 typedef struct {
105     TCGCond cond;
106     bool is_bool;
107     TCGv c1, c2;
108 } DisasCompare;
109 
110 // This function uses non-native bit order
111 #define GET_FIELD(X, FROM, TO)                                  \
112     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
113 
114 // This function uses the order in the manuals, i.e. bit 0 is 2^0
115 #define GET_FIELD_SP(X, FROM, TO)               \
116     GET_FIELD(X, 31 - (TO), 31 - (FROM))
117 
118 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
119 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
120 
121 #ifdef TARGET_SPARC64
122 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
123 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
124 #else
125 #define DFPREG(r) (r & 0x1e)
126 #define QFPREG(r) (r & 0x1c)
127 #endif
128 
129 #define UA2005_HTRAP_MASK 0xff
130 #define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low LEN bits of X to a full int.
 * LEN must be in [1, 32]; LEN == 32 returns X unchanged.
 *
 * The original form shifted a signed int left, which is undefined
 * behavior when X is negative (C11 6.5.7).  Do the left shift on an
 * unsigned value instead; the arithmetic right shift of the signed
 * result then replicates the sign bit (QEMU assumes two's complement
 * hosts with arithmetic >> on signed types).
 */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;
    return (int)((uint32_t)x << shift) >> shift;
}
137 
138 #define IS_IMM (insn & (1<<13))
139 
/*
 * Mark the FPU register bank containing %f<rd> dirty in FPRS
 * (bit 0 = DL, lower bank; bit 1 = DU, upper bank).  No-op on 32-bit
 * SPARC, which has no FPRS register.
 */
static void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
152 
153 /* floating point registers moves */
/*
 * Read single-precision register %f<src> into a fresh i32 temp.
 * Each cpu_fpr[] element packs two single registers: the even-numbered
 * register lives in the high 32 bits, the odd one in the low 32 bits.
 */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}

/*
 * Write v to single-precision register %f<dst>, preserving the other
 * half of the containing i64 pair, and mark the bank dirty.
 */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    /* Odd registers occupy bits [31:0], even registers bits [63:32].  */
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}

/* Allocate a temp to receive a single-precision result. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}

/* Return the i64 global backing double-precision register %d<src>. */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

/* Write v to double-precision register %d<dst> and mark the bank dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}

/* Return the i64 global backing %d<dst>, to be written directly. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
/* Copy quad register pair starting at %f<src> into the env scratch qt0. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy quad register pair starting at %f<src> into the env scratch qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy the env scratch qt0 back into the quad register pair at %f<dst>. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/*
 * Write the (v1:high, v2:low) pair to quad register %q<dst> and mark
 * the bank dirty.
 */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
231 
#ifdef TARGET_SPARC64
/* High 64 bits of quad register %q<src>. */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Low 64 bits of quad register %q<src>. */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad register %q<rs> to %q<rd> and mark the bank dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif
255 
256 /* moves */
257 #ifdef CONFIG_USER_ONLY
258 #define supervisor(dc) 0
259 #ifdef TARGET_SPARC64
260 #define hypervisor(dc) 0
261 #endif
262 #else
263 #ifdef TARGET_SPARC64
264 #define hypervisor(dc) (dc->hypervisor)
265 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
266 #else
267 #define supervisor(dc) (dc->supervisor)
268 #endif
269 #endif
270 
271 #if !defined(TARGET_SPARC64)
272 # define AM_CHECK(dc)  false
273 #elif defined(TARGET_ABI32)
274 # define AM_CHECK(dc)  true
275 #elif defined(CONFIG_USER_ONLY)
276 # define AM_CHECK(dc)  false
277 #else
278 # define AM_CHECK(dc)  ((dc)->address_mask_32bit)
279 #endif
280 
/*
 * Truncate addr in place to 32 bits when the 32-bit address mask is in
 * effect (AM_CHECK); otherwise leave it untouched.
 */
static void gen_address_mask(DisasContext *dc, TCGv addr)
{
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
}
287 
288 static target_ulong address_mask_i(DisasContext *dc, target_ulong addr)
289 {
290     return AM_CHECK(dc) ? (uint32_t)addr : addr;
291 }
292 
293 static TCGv gen_load_gpr(DisasContext *dc, int reg)
294 {
295     if (reg > 0) {
296         assert(reg < 32);
297         return cpu_regs[reg];
298     } else {
299         TCGv t = tcg_temp_new();
300         tcg_gen_movi_tl(t, 0);
301         return t;
302     }
303 }
304 
305 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
306 {
307     if (reg > 0) {
308         assert(reg < 32);
309         tcg_gen_mov_tl(cpu_regs[reg], v);
310     }
311 }
312 
313 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
314 {
315     if (reg > 0) {
316         assert(reg < 32);
317         return cpu_regs[reg];
318     } else {
319         return tcg_temp_new();
320     }
321 }
322 
323 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
324 {
325     return translator_use_goto_tb(&s->base, pc) &&
326            translator_use_goto_tb(&s->base, npc);
327 }
328 
/*
 * End the TB, transferring control to (pc, npc).  Uses a chained
 * goto_tb exit when allowed, otherwise falls back to an indirect
 * lookup-and-jump through the TB hash.
 */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
345 
/*
 * Extract one PSR condition-code bit from the i32 flags value src into
 * bit 0 of the target-long reg (result is 0 or 1).
 */
// XXX suboptimal
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    /* reg = N (negative) flag. */
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    /* reg = Z (zero) flag. */
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    /* reg = V (overflow) flag. */
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    /* reg = C (carry) flag. */
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
370 
/*
 * dst = src1 + src2, recording operands and result in cpu_cc_* so the
 * condition codes can be computed lazily later (caller sets cc_op).
 */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/*
 * Recover the 32-bit carry-out of a previous add recorded in the
 * cpu_cc_* globals, as a 0/1 i32 value.
 */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}

/*
 * Recover the 32-bit borrow-out of a previous subtract recorded in the
 * cpu_cc_* globals, as a 0/1 i32 value.
 */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
420 
/*
 * ADDX/ADDXcc: dst = src1 + src2 + icc.C.  The carry is recovered in
 * the cheapest way permitted by how the flags were last produced
 * (dc->cc_op); the generic fallback calls the compute_C_icc helper.
 * When update_cc is set, operands/result are recorded and cc_op
 * becomes CC_OP_ADDX.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

    /* Widen the 0/1 carry to target-long width before adding it in.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
485 
/*
 * dst = src1 - src2, recording operands and result in cpu_cc_* so the
 * condition codes can be computed lazily later (caller sets cc_op).
 */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}

/*
 * SUBX/SUBXcc: dst = src1 - src2 - icc.C.  Mirror of gen_op_addx_int:
 * the borrow is recovered per dc->cc_op, with a helper-call fallback.
 * When update_cc is set, operands/result are recorded and cc_op
 * becomes CC_OP_SUBX.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

    /* Widen the 0/1 borrow to target-long width before subtracting it.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
558 
/*
 * MULScc: one step of the multiply-step algorithm.  Performs a single
 * shift-and-conditionally-add iteration using the Y register, leaving
 * the partial sum in dst and the operands/result in cpu_cc_* for flag
 * computation by the caller.
 */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
597 
/*
 * 32x32 -> 64-bit multiply of the low halves of src1/src2.  The low 32
 * bits of the product go to dst and the high 32 bits to the Y register
 * (on a 64-bit target the whole 64-bit product lands in dst).
 * sign_ext selects signed (SMUL) vs unsigned (UMUL) semantics.
 */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}

static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
634 
/*
 * Integer condition evaluators: each sets dst to 1 if the condition
 * holds over the PSR/XCC flags in src, else 0.  The formula each one
 * computes is given in the comment above it.
 */

// 1
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// N ^ V
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}

// C | Z
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// C
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

/* The negated conditions below compute the positive form then flip bit 0. */

// !Z
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
748 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered

  The fbXX evaluators below set dst to 1 if the float condition holds
  for the selected FCC field of the FSR value in src, else 0.
  fcc_offset selects which of the (up to four) FCC fields to test.
*/
/* Extract FCC0 of the selected field into bit 0 of reg. */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Extract FCC1 of the selected field into bit 0 of reg. */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}

// 1 or 2: FCC0 ^ FCC1
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}

// 1 or 3: FCC0
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}

// 2 or 3: FCC1
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    /* note reversed andc operands: t0 & ~dst = FCC1 & !FCC0 */
    tcg_gen_andc_tl(dst, t0, dst);
}

// 3: FCC0 & FCC1
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}

// 0: !(FCC0 | FCC1)
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
889 
/*
 * End the TB with a two-way conditional exit: go to pc1 when the
 * materialized condition in r_cond is non-zero, else to pc2.
 */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}

/*
 * Annulling conditional branch: when cpu_cond is true, execute the
 * delay slot at npc then continue at pc1; when false, the delay slot
 * is annulled and execution resumes at npc + 4.  Ends the TB.
 */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}

/*
 * Non-annulling conditional branch: the delay slot always executes;
 * npc becomes pc1 if cpu_cond is true, else npc + 4.  When npc is
 * static we defer the selection via the JUMP_PC state; when it is
 * already dynamic we select with a movcond now.
 */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}

/*
 * Resolve a pending JUMP_PC: select cpu_npc from jump_pc[0]/[1]
 * according to cpu_cond.
 */
static void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
952 
953 /* call this function before using the condition register as it may
954    have been set for a jump */
/* call this function before using the condition register as it may
   have been set for a jump; resolves a pending JUMP_PC into cpu_npc. */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}

/*
 * Make cpu_npc match dc->npc: materialize a static npc with a movi,
 * resolve JUMP_PC via gen_generic_branch; already-dynamic values need
 * nothing.
 */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}

/* Force lazily-computed condition codes into the architectural PSR. */
static void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(tcg_env);
    }
}

/* Flush the translation-time pc/npc into the cpu_pc/cpu_npc globals. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}

/* Raise exception `which` at the current insn; ends the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
1002 
1003 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
1004 {
1005     DisasDelayException *e = g_new0(DisasDelayException, 1);
1006 
1007     e->next = dc->delay_excp_list;
1008     dc->delay_excp_list = e;
1009 
1010     e->lab = gen_new_label();
1011     e->excp = excp;
1012     e->pc = dc->pc;
1013     /* Caller must have used flush_cond before branch. */
1014     assert(e->npc != JUMP_PC);
1015     e->npc = dc->npc;
1016 
1017     return e->lab;
1018 }
1019 
/* Convenience wrapper: delayed exception with a constant trap number. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}

/*
 * Branch to a delayed TT_UNALIGNED trap if any of the bits in `mask`
 * are set in addr (i.e. addr is not aligned to mask + 1).
 */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}

/*
 * Advance pc to npc, both at translation time and in the generated
 * code, resolving a pending JUMP_PC first if necessary.
 */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
    }
}

/* Sequential advance: pc = npc, npc += 4 in the generated code. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1064 
1065 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1066                         DisasContext *dc)
1067 {
1068     static int subcc_cond[16] = {
1069         TCG_COND_NEVER,
1070         TCG_COND_EQ,
1071         TCG_COND_LE,
1072         TCG_COND_LT,
1073         TCG_COND_LEU,
1074         TCG_COND_LTU,
1075         -1, /* neg */
1076         -1, /* overflow */
1077         TCG_COND_ALWAYS,
1078         TCG_COND_NE,
1079         TCG_COND_GT,
1080         TCG_COND_GE,
1081         TCG_COND_GTU,
1082         TCG_COND_GEU,
1083         -1, /* pos */
1084         -1, /* no overflow */
1085     };
1086 
1087     static int logic_cond[16] = {
1088         TCG_COND_NEVER,
1089         TCG_COND_EQ,     /* eq:  Z */
1090         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1091         TCG_COND_LT,     /* lt:  N ^ V -> N */
1092         TCG_COND_EQ,     /* leu: C | Z -> Z */
1093         TCG_COND_NEVER,  /* ltu: C -> 0 */
1094         TCG_COND_LT,     /* neg: N */
1095         TCG_COND_NEVER,  /* vs:  V -> 0 */
1096         TCG_COND_ALWAYS,
1097         TCG_COND_NE,     /* ne:  !Z */
1098         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1099         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1100         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1101         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1102         TCG_COND_GE,     /* pos: !N */
1103         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1104     };
1105 
1106     TCGv_i32 r_src;
1107     TCGv r_dst;
1108 
1109 #ifdef TARGET_SPARC64
1110     if (xcc) {
1111         r_src = cpu_xcc;
1112     } else {
1113         r_src = cpu_psr;
1114     }
1115 #else
1116     r_src = cpu_psr;
1117 #endif
1118 
1119     switch (dc->cc_op) {
1120     case CC_OP_LOGIC:
1121         cmp->cond = logic_cond[cond];
1122     do_compare_dst_0:
1123         cmp->is_bool = false;
1124         cmp->c2 = tcg_constant_tl(0);
1125 #ifdef TARGET_SPARC64
1126         if (!xcc) {
1127             cmp->c1 = tcg_temp_new();
1128             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1129             break;
1130         }
1131 #endif
1132         cmp->c1 = cpu_cc_dst;
1133         break;
1134 
1135     case CC_OP_SUB:
1136         switch (cond) {
1137         case 6:  /* neg */
1138         case 14: /* pos */
1139             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1140             goto do_compare_dst_0;
1141 
1142         case 7: /* overflow */
1143         case 15: /* !overflow */
1144             goto do_dynamic;
1145 
1146         default:
1147             cmp->cond = subcc_cond[cond];
1148             cmp->is_bool = false;
1149 #ifdef TARGET_SPARC64
1150             if (!xcc) {
1151                 /* Note that sign-extension works for unsigned compares as
1152                    long as both operands are sign-extended.  */
1153                 cmp->c1 = tcg_temp_new();
1154                 cmp->c2 = tcg_temp_new();
1155                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1156                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1157                 break;
1158             }
1159 #endif
1160             cmp->c1 = cpu_cc_src;
1161             cmp->c2 = cpu_cc_src2;
1162             break;
1163         }
1164         break;
1165 
1166     default:
1167     do_dynamic:
1168         gen_helper_compute_psr(tcg_env);
1169         dc->cc_op = CC_OP_FLAGS;
1170         /* FALLTHRU */
1171 
1172     case CC_OP_FLAGS:
1173         /* We're going to generate a boolean result.  */
1174         cmp->cond = TCG_COND_NE;
1175         cmp->is_bool = true;
1176         cmp->c1 = r_dst = tcg_temp_new();
1177         cmp->c2 = tcg_constant_tl(0);
1178 
1179         switch (cond) {
1180         case 0x0:
1181             gen_op_eval_bn(r_dst);
1182             break;
1183         case 0x1:
1184             gen_op_eval_be(r_dst, r_src);
1185             break;
1186         case 0x2:
1187             gen_op_eval_ble(r_dst, r_src);
1188             break;
1189         case 0x3:
1190             gen_op_eval_bl(r_dst, r_src);
1191             break;
1192         case 0x4:
1193             gen_op_eval_bleu(r_dst, r_src);
1194             break;
1195         case 0x5:
1196             gen_op_eval_bcs(r_dst, r_src);
1197             break;
1198         case 0x6:
1199             gen_op_eval_bneg(r_dst, r_src);
1200             break;
1201         case 0x7:
1202             gen_op_eval_bvs(r_dst, r_src);
1203             break;
1204         case 0x8:
1205             gen_op_eval_ba(r_dst);
1206             break;
1207         case 0x9:
1208             gen_op_eval_bne(r_dst, r_src);
1209             break;
1210         case 0xa:
1211             gen_op_eval_bg(r_dst, r_src);
1212             break;
1213         case 0xb:
1214             gen_op_eval_bge(r_dst, r_src);
1215             break;
1216         case 0xc:
1217             gen_op_eval_bgu(r_dst, r_src);
1218             break;
1219         case 0xd:
1220             gen_op_eval_bcc(r_dst, r_src);
1221             break;
1222         case 0xe:
1223             gen_op_eval_bpos(r_dst, r_src);
1224             break;
1225         case 0xf:
1226             gen_op_eval_bvc(r_dst, r_src);
1227             break;
1228         }
1229         break;
1230     }
1231 }
1232 
/*
 * Assemble a DisasCompare for float branch condition COND, evaluated
 * on FSR condition-code field fcc[CC].
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Bit offset of the selected fcc field within FSR, relative to fcc0
       (fcc0 at bit 10, fcc1..3 at bits 32/34/36 — presumably the
       gen_op_eval_fb* helpers add the base shift; TODO confirm). */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Evaluate the architectural fcc condition into r_dst. */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1311 
1312 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1313 {
1314     DisasCompare cmp;
1315     gen_fcompare(&cmp, cc, cond);
1316 
1317     /* The interface is to return a boolean in r_dst.  */
1318     if (cmp.is_bool) {
1319         tcg_gen_mov_tl(r_dst, cmp.c1);
1320     } else {
1321         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1322     }
1323 }
1324 
/*
 * Register-based conditions, stored INVERTED: users apply
 * tcg_invert_cond() to recover the architectural sense
 * (see gen_compare_reg below).
 */
static const TCGCond gen_tcg_cond_reg[8] = {
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1336 
1337 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1338 {
1339     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1340     cmp->is_bool = false;
1341     cmp->c1 = r_src;
1342     cmp->c2 = tcg_constant_tl(0);
1343 }
1344 
1345 #ifdef TARGET_SPARC64
1346 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1347 {
1348     switch (fccno) {
1349     case 0:
1350         gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
1351         break;
1352     case 1:
1353         gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1354         break;
1355     case 2:
1356         gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1357         break;
1358     case 3:
1359         gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1360         break;
1361     }
1362 }
1363 
1364 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1365 {
1366     switch (fccno) {
1367     case 0:
1368         gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
1369         break;
1370     case 1:
1371         gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1372         break;
1373     case 2:
1374         gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1375         break;
1376     case 3:
1377         gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1378         break;
1379     }
1380 }
1381 
1382 static void gen_op_fcmpq(int fccno)
1383 {
1384     switch (fccno) {
1385     case 0:
1386         gen_helper_fcmpq(cpu_fsr, tcg_env);
1387         break;
1388     case 1:
1389         gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
1390         break;
1391     case 2:
1392         gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
1393         break;
1394     case 3:
1395         gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
1396         break;
1397     }
1398 }
1399 
1400 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1401 {
1402     switch (fccno) {
1403     case 0:
1404         gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
1405         break;
1406     case 1:
1407         gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1408         break;
1409     case 2:
1410         gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1411         break;
1412     case 3:
1413         gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1414         break;
1415     }
1416 }
1417 
1418 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1419 {
1420     switch (fccno) {
1421     case 0:
1422         gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
1423         break;
1424     case 1:
1425         gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1426         break;
1427     case 2:
1428         gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1429         break;
1430     case 3:
1431         gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1432         break;
1433     }
1434 }
1435 
1436 static void gen_op_fcmpeq(int fccno)
1437 {
1438     switch (fccno) {
1439     case 0:
1440         gen_helper_fcmpeq(cpu_fsr, tcg_env);
1441         break;
1442     case 1:
1443         gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
1444         break;
1445     case 2:
1446         gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
1447         break;
1448     case 3:
1449         gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
1450         break;
1451     }
1452 }
1453 
1454 #else
1455 
/* Pre-v9: only a single fcc field exists, so fccno is ignored. */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1460 
/* Pre-v9 double compare; fccno is ignored (single fcc field). */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1465 
/* Pre-v9 quad compare on QT0/QT1; fccno is ignored (single fcc field). */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}
1470 
/* Pre-v9 signalling single compare; fccno is ignored (single fcc field). */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1475 
/* Pre-v9 signalling double compare; fccno is ignored (single fcc field). */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1480 
/* Pre-v9 signalling quad compare; fccno is ignored (single fcc field). */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1485 #endif
1486 
/* Replace the FSR.ftt field with FSR_FLAGS and raise a TT_FP_EXCP trap. */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1493 
/*
 * Raise TT_NFPU_INSN if the FPU is disabled.  Returns nonzero when an
 * exception was generated, so the caller can abandon the instruction.
 * In user-only mode the FPU is always considered enabled.
 */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1504 
/* Clear the FSR.ftt and current IEEE exception (cexc) bits. */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1509 
1510 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1511                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1512 {
1513     TCGv_i32 dst, src;
1514 
1515     src = gen_load_fpr_F(dc, rs);
1516     dst = gen_dest_fpr_F(dc);
1517 
1518     gen(dst, tcg_env, src);
1519     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1520 
1521     gen_store_fpr_F(dc, rd, dst);
1522 }
1523 
1524 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1525                           void (*gen)(TCGv_i32, TCGv_i32))
1526 {
1527     TCGv_i32 dst, src;
1528 
1529     src = gen_load_fpr_F(dc, rs);
1530     dst = gen_dest_fpr_F(dc);
1531 
1532     gen(dst, src);
1533 
1534     gen_store_fpr_F(dc, rd, dst);
1535 }
1536 
1537 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1538                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1539 {
1540     TCGv_i32 dst, src1, src2;
1541 
1542     src1 = gen_load_fpr_F(dc, rs1);
1543     src2 = gen_load_fpr_F(dc, rs2);
1544     dst = gen_dest_fpr_F(dc);
1545 
1546     gen(dst, tcg_env, src1, src2);
1547     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1548 
1549     gen_store_fpr_F(dc, rd, dst);
1550 }
1551 
1552 #ifdef TARGET_SPARC64
1553 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1554                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1555 {
1556     TCGv_i32 dst, src1, src2;
1557 
1558     src1 = gen_load_fpr_F(dc, rs1);
1559     src2 = gen_load_fpr_F(dc, rs2);
1560     dst = gen_dest_fpr_F(dc);
1561 
1562     gen(dst, src1, src2);
1563 
1564     gen_store_fpr_F(dc, rd, dst);
1565 }
1566 #endif
1567 
1568 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1569                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1570 {
1571     TCGv_i64 dst, src;
1572 
1573     src = gen_load_fpr_D(dc, rs);
1574     dst = gen_dest_fpr_D(dc, rd);
1575 
1576     gen(dst, tcg_env, src);
1577     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1578 
1579     gen_store_fpr_D(dc, rd, dst);
1580 }
1581 
1582 #ifdef TARGET_SPARC64
1583 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1584                           void (*gen)(TCGv_i64, TCGv_i64))
1585 {
1586     TCGv_i64 dst, src;
1587 
1588     src = gen_load_fpr_D(dc, rs);
1589     dst = gen_dest_fpr_D(dc, rd);
1590 
1591     gen(dst, src);
1592 
1593     gen_store_fpr_D(dc, rd, dst);
1594 }
1595 #endif
1596 
1597 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1598                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1599 {
1600     TCGv_i64 dst, src1, src2;
1601 
1602     src1 = gen_load_fpr_D(dc, rs1);
1603     src2 = gen_load_fpr_D(dc, rs2);
1604     dst = gen_dest_fpr_D(dc, rd);
1605 
1606     gen(dst, tcg_env, src1, src2);
1607     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1608 
1609     gen_store_fpr_D(dc, rd, dst);
1610 }
1611 
1612 #ifdef TARGET_SPARC64
1613 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1614                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1615 {
1616     TCGv_i64 dst, src1, src2;
1617 
1618     src1 = gen_load_fpr_D(dc, rs1);
1619     src2 = gen_load_fpr_D(dc, rs2);
1620     dst = gen_dest_fpr_D(dc, rd);
1621 
1622     gen(dst, src1, src2);
1623 
1624     gen_store_fpr_D(dc, rd, dst);
1625 }
1626 
1627 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1628                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1629 {
1630     TCGv_i64 dst, src1, src2;
1631 
1632     src1 = gen_load_fpr_D(dc, rs1);
1633     src2 = gen_load_fpr_D(dc, rs2);
1634     dst = gen_dest_fpr_D(dc, rd);
1635 
1636     gen(dst, cpu_gsr, src1, src2);
1637 
1638     gen_store_fpr_D(dc, rd, dst);
1639 }
1640 
1641 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1642                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1643 {
1644     TCGv_i64 dst, src0, src1, src2;
1645 
1646     src1 = gen_load_fpr_D(dc, rs1);
1647     src2 = gen_load_fpr_D(dc, rs2);
1648     src0 = gen_load_fpr_D(dc, rd);
1649     dst = gen_dest_fpr_D(dc, rd);
1650 
1651     gen(dst, src0, src1, src2);
1652 
1653     gen_store_fpr_D(dc, rd, dst);
1654 }
1655 #endif
1656 
/* Quad-precision unary op: source staged in QT1, result comes back in
   QT0; includes the IEEE exception check. */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1668 
1669 #ifdef TARGET_SPARC64
/* Quad-precision unary op (QT1 -> QT0) with no IEEE exception check. */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1680 #endif
1681 
/* Quad-precision binary op: sources staged in QT0/QT1, result returned
   in QT0; includes the IEEE exception check. */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1694 
1695 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1696                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1697 {
1698     TCGv_i64 dst;
1699     TCGv_i32 src1, src2;
1700 
1701     src1 = gen_load_fpr_F(dc, rs1);
1702     src2 = gen_load_fpr_F(dc, rs2);
1703     dst = gen_dest_fpr_D(dc, rd);
1704 
1705     gen(dst, tcg_env, src1, src2);
1706     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1707 
1708     gen_store_fpr_D(dc, rd, dst);
1709 }
1710 
1711 static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1712                         void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1713 {
1714     TCGv_i64 src1, src2;
1715 
1716     src1 = gen_load_fpr_D(dc, rs1);
1717     src2 = gen_load_fpr_D(dc, rs2);
1718 
1719     gen(tcg_env, src1, src2);
1720     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1721 
1722     gen_op_store_QT0_fpr(QFPREG(rd));
1723     gen_update_fprs_dirty(dc, QFPREG(rd));
1724 }
1725 
1726 #ifdef TARGET_SPARC64
1727 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1728                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1729 {
1730     TCGv_i64 dst;
1731     TCGv_i32 src;
1732 
1733     src = gen_load_fpr_F(dc, rs);
1734     dst = gen_dest_fpr_D(dc, rd);
1735 
1736     gen(dst, tcg_env, src);
1737     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1738 
1739     gen_store_fpr_D(dc, rd, dst);
1740 }
1741 #endif
1742 
1743 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1744                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1745 {
1746     TCGv_i64 dst;
1747     TCGv_i32 src;
1748 
1749     src = gen_load_fpr_F(dc, rs);
1750     dst = gen_dest_fpr_D(dc, rd);
1751 
1752     gen(dst, tcg_env, src);
1753 
1754     gen_store_fpr_D(dc, rd, dst);
1755 }
1756 
1757 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1758                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1759 {
1760     TCGv_i32 dst;
1761     TCGv_i64 src;
1762 
1763     src = gen_load_fpr_D(dc, rs);
1764     dst = gen_dest_fpr_F(dc);
1765 
1766     gen(dst, tcg_env, src);
1767     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1768 
1769     gen_store_fpr_F(dc, rd, dst);
1770 }
1771 
1772 static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1773                        void (*gen)(TCGv_i32, TCGv_ptr))
1774 {
1775     TCGv_i32 dst;
1776 
1777     gen_op_load_fpr_QT1(QFPREG(rs));
1778     dst = gen_dest_fpr_F(dc);
1779 
1780     gen(dst, tcg_env);
1781     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1782 
1783     gen_store_fpr_F(dc, rd, dst);
1784 }
1785 
1786 static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1787                        void (*gen)(TCGv_i64, TCGv_ptr))
1788 {
1789     TCGv_i64 dst;
1790 
1791     gen_op_load_fpr_QT1(QFPREG(rs));
1792     dst = gen_dest_fpr_D(dc, rd);
1793 
1794     gen(dst, tcg_env);
1795     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1796 
1797     gen_store_fpr_D(dc, rd, dst);
1798 }
1799 
1800 static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1801                           void (*gen)(TCGv_ptr, TCGv_i32))
1802 {
1803     TCGv_i32 src;
1804 
1805     src = gen_load_fpr_F(dc, rs);
1806 
1807     gen(tcg_env, src);
1808 
1809     gen_op_store_QT0_fpr(QFPREG(rd));
1810     gen_update_fprs_dirty(dc, QFPREG(rd));
1811 }
1812 
1813 static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1814                           void (*gen)(TCGv_ptr, TCGv_i64))
1815 {
1816     TCGv_i64 src;
1817 
1818     src = gen_load_fpr_D(dc, rs);
1819 
1820     gen(tcg_env, src);
1821 
1822     gen_op_store_QT0_fpr(QFPREG(rd));
1823     gen_update_fprs_dirty(dc, QFPREG(rd));
1824 }
1825 
/* swap: atomically exchange SRC with memory at ADDR; the previous
   memory contents are returned in DST. */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1832 
1833 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1834 {
1835     TCGv m1 = tcg_constant_tl(0xff);
1836     gen_address_mask(dc, addr);
1837     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1838 }
1839 
1840 /* asi moves */
1841 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of how an ASI access is implemented (see get_asi). */
typedef enum {
    GET_ASI_HELPER,   /* fall back to the out-of-line ld/st_asi helpers */
    GET_ASI_EXCP,     /* an exception was generated; emit no access */
    GET_ASI_DIRECT,   /* plain tcg_gen_qemu_ld/st with the chosen mem_idx */
    GET_ASI_DTWINX,   /* twin/quad-load asis (used by ldda/stda) */
    GET_ASI_BLOCK,    /* block-transfer asis (ASI_BLK_*) */
    GET_ASI_SHORT,    /* 8/16-bit "short" floating-point asis (ASI_FL*) */
    GET_ASI_BCOPY,    /* sparc32 32-byte block copy (ASI_M_BCOPY) */
    GET_ASI_BFILL,    /* sparc32 block fill (ASI_M_BFILL) */
} ASIType;
1852 
/* Decoded ASI access, produced by get_asi(). */
typedef struct {
    ASIType type;   /* implementation class */
    int asi;        /* resolved asi number */
    int mem_idx;    /* mmu index to use for the access */
    MemOp memop;    /* access size/endianness, possibly adjusted */
} DisasASI;
1859 
/*
 * Decode the ASI field of a memory instruction and classify how the
 * access should be implemented.  Returns the implementation class,
 * the resolved asi number, the mmu index to use, and the (possibly
 * adjusted) memop.  On illegal or privileged use, an exception is
 * generated and the type is GET_ASI_EXCP.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: map the asi onto an mmu index.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: classify the implementation strategy, possibly
           overriding the memop for the short fp asis.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2072 
/*
 * Generate a load-alternate: DST = *ADDR via the ASI encoded in INSN,
 * with the size/sign/endianness given by MEMOP.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit no access. */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may raise exceptions: sync dc state first. */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to the 32-bit target. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2107 
/* Emit an ASI store of MEMOP size from SRC.  Direct ASIs are emitted as
 * inline guest stores; anything else falls back to the st_asi helper,
 * after which the TB must end because page maps may have changed.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may fault; make pc/npc visible first.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen for sparc32.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2183 
/* SWAPA: atomically exchange the 32-bit value SRC with memory at ADDR
 * under the given ASI, returning the old memory value in DST.  */
static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
                         TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_TEUL);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
        break;
    default:
        /* ??? Should be DAE_invalid_asi.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2201 
/* CASA: 32-bit compare-and-swap on [ADDR] with comparand CMPV and
 * swap value %rd; the old memory value is written back to %rd.  */
static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
                        int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUL);
    TCGv oldv;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        return;
    case GET_ASI_DIRECT:
        oldv = tcg_temp_new();
        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
                                  da.mem_idx, da.memop | MO_ALIGN);
        gen_store_gpr(dc, rd, oldv);
        break;
    default:
        /* ??? Should be DAE_invalid_asi.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2223 
/* LDSTUBA: atomically load the byte at [ADDR] into DST and store 0xff
 * there, under the given ASI.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* Can't do the load+store non-atomically with other cpus
               running; retry the whole insn under the exclusive lock.  */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2259 #endif
2260 
2261 #ifdef TARGET_SPARC64
2262 static void gen_ldf_asi(DisasContext *dc, TCGv addr,
2263                         int insn, int size, int rd)
2264 {
2265     DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
2266     TCGv_i32 d32;
2267     TCGv_i64 d64;
2268 
2269     switch (da.type) {
2270     case GET_ASI_EXCP:
2271         break;
2272 
2273     case GET_ASI_DIRECT:
2274         gen_address_mask(dc, addr);
2275         switch (size) {
2276         case 4:
2277             d32 = gen_dest_fpr_F(dc);
2278             tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
2279             gen_store_fpr_F(dc, rd, d32);
2280             break;
2281         case 8:
2282             tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
2283                                 da.memop | MO_ALIGN_4);
2284             break;
2285         case 16:
2286             d64 = tcg_temp_new_i64();
2287             tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
2288             tcg_gen_addi_tl(addr, addr, 8);
2289             tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
2290                                 da.memop | MO_ALIGN_4);
2291             tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
2292             break;
2293         default:
2294             g_assert_not_reached();
2295         }
2296         break;
2297 
2298     case GET_ASI_BLOCK:
2299         /* Valid for lddfa on aligned registers only.  */
2300         if (size == 8 && (rd & 7) == 0) {
2301             MemOp memop;
2302             TCGv eight;
2303             int i;
2304 
2305             gen_address_mask(dc, addr);
2306 
2307             /* The first operation checks required alignment.  */
2308             memop = da.memop | MO_ALIGN_64;
2309             eight = tcg_constant_tl(8);
2310             for (i = 0; ; ++i) {
2311                 tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
2312                                     da.mem_idx, memop);
2313                 if (i == 7) {
2314                     break;
2315                 }
2316                 tcg_gen_add_tl(addr, addr, eight);
2317                 memop = da.memop;
2318             }
2319         } else {
2320             gen_exception(dc, TT_ILL_INSN);
2321         }
2322         break;
2323 
2324     case GET_ASI_SHORT:
2325         /* Valid for lddfa only.  */
2326         if (size == 8) {
2327             gen_address_mask(dc, addr);
2328             tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
2329                                 da.memop | MO_ALIGN);
2330         } else {
2331             gen_exception(dc, TT_ILL_INSN);
2332         }
2333         break;
2334 
2335     default:
2336         {
2337             TCGv_i32 r_asi = tcg_constant_i32(da.asi);
2338             TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);
2339 
2340             save_state(dc);
2341             /* According to the table in the UA2011 manual, the only
2342                other asis that are valid for ldfa/lddfa/ldqfa are
2343                the NO_FAULT asis.  We still need a helper for these,
2344                but we can just use the integer asi helper for them.  */
2345             switch (size) {
2346             case 4:
2347                 d64 = tcg_temp_new_i64();
2348                 gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
2349                 d32 = gen_dest_fpr_F(dc);
2350                 tcg_gen_extrl_i64_i32(d32, d64);
2351                 gen_store_fpr_F(dc, rd, d32);
2352                 break;
2353             case 8:
2354                 gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
2355                 break;
2356             case 16:
2357                 d64 = tcg_temp_new_i64();
2358                 gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
2359                 tcg_gen_addi_tl(addr, addr, 8);
2360                 gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
2361                 tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
2362                 break;
2363             default:
2364                 g_assert_not_reached();
2365             }
2366         }
2367         break;
2368     }
2369 }
2370 
/* Floating-point ASI store (stfa/stdfa/stqfa).  SIZE is the access width
 * in bytes: 4, 8 or 16.  RD is the raw fp register number.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2453 
/* LDDA for sparc64: load a doubleword (or a 128-bit twin for DTWINX
 * ASIs) into the register pair %rd:%rd+1.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        return;

    case GET_ASI_DTWINX:
        /* Two 64-bit loads; only the first checks the 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2515 
/* STDA for sparc64: store the register pair %rd:%rd+1 as one doubleword
 * (or as a 128-bit twin for DTWINX ASIs).  HI is the value of %rd.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DTWINX:
        /* Two 64-bit stores; only the first checks the 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2571 
/* CASXA: 64-bit compare-and-swap on [ADDR] with comparand CMPV and
 * swap value %rd; the old memory value is written back to %rd.  */
static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv oldv;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        return;
    case GET_ASI_DIRECT:
        oldv = tcg_temp_new();
        tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
                                  da.mem_idx, da.memop | MO_ALIGN);
        gen_store_gpr(dc, rd, oldv);
        break;
    default:
        /* ??? Should be DAE_invalid_asi.  */
        gen_exception(dc, TT_DATA_ACCESS);
        break;
    }
}
2593 
2594 #elif !defined(CONFIG_USER_ONLY)
/* LDDA for sparc32: one 64-bit load split into the register pair
 * %rd (high word) and %rd+1 (low word).  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit result across the even/odd register pair.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2628 
/* STDA for sparc32: store the register pair %rd:%rd+1 as one 64-bit
 * value.  HI is the value of %rd.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Assemble the 64-bit value from the even/odd register pair.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2674 #endif
2675 
2676 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2677 {
2678     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2679     return gen_load_gpr(dc, rs1);
2680 }
2681 
2682 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2683 {
2684     if (IS_IMM) { /* immediate */
2685         target_long simm = GET_FIELDs(insn, 19, 31);
2686         TCGv t = tcg_temp_new();
2687         tcg_gen_movi_tl(t, simm);
2688         return t;
2689     } else {      /* register */
2690         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2691         return gen_load_gpr(dc, rs2);
2692     }
2693 }
2694 
2695 #ifdef TARGET_SPARC64
/* FMOVScc: conditionally move single-precision fp register RS to RD
 * according to the comparison in CMP.  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds a 0/1 truth value.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_constant_i32(0);

    /* dst = (c32 != 0) ? rs : rd -- i.e. keep rd when the cond fails.  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2721 
/* FMOVDcc: conditionally move double-precision fp register RS to RD
 * according to the comparison in CMP.  */
static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
    /* dst = cond ? rs : rd -- keep the old value when the cond fails.  */
    tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
                        gen_load_fpr_D(dc, rs),
                        gen_load_fpr_D(dc, rd));
    gen_store_fpr_D(dc, rd, dst);
}
2730 
/* FMOVQcc: conditionally move quad-precision fp register RS to RD
 * according to the comparison in CMP.  A quad is a pair of i64 halves.  */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    /* Move both 64-bit halves under the same condition.  */
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(dc, qd);
}
2743 
2744 #ifndef CONFIG_USER_ONLY
/* Compute R_TSPTR = &env->ts[env->tl & MAXTL_MASK], a pointer to the
 * trap state for the current trap level.  */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2766 #endif
2767 
/* VIS EDGE8/16/32{L}{CC}: compute the edge mask for a partial store of
 * WIDTH-bit elements between addresses S1 and S2.  LEFT selects the
 * little-endian variant; CC additionally sets icc/xcc as for subcc.  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* The CC forms also update the condition codes as subcc s1, s2.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = table lookup on the low address bits of s1/s2.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(lo1, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the 8-byte-aligned addresses, truncated to 32 bits in
       32-bit address-mask mode.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
    tcg_gen_and_tl(lo2, lo2, lo1);
    tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
}
2854 
/* VIS ALIGNADDR{,_LITTLE}: dst = (s1 + s2) & ~7, and the low 3 bits of
 * the sum (negated for the LITTLE variant) are stored in GSR.align.  */
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        /* LITTLE variant stores the two's complement of the offset.  */
        tcg_gen_neg_tl(tmp, tmp);
    }
    /* GSR.align occupies bits [2:0] of %gsr.  */
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
}
2866 
/* VIS FALIGNDATA: concatenate s1:s2 and extract the 64-bit value that
 * starts GSR.align bytes into the pair.  */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8, i.e. the bit offset into s1.  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2887 #endif
2888 
2889 /* Include the auto-generated decoder.  */
2890 #include "decode-insns.c.inc"
2891 
/* Glue an availability predicate onto a decodetree translator: the
 * generated trans_NAME succeeds only when avail_AVAIL(dc) holds and
 * FUNC accepts the decoded arguments.  */
#define TRANS(NAME, AVAIL, FUNC, ...) \
    static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
    { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }

/* Availability predicates for TRANS: ALL, sparc32-only, sparc64-only.  */
#define avail_ALL(C)      true
#ifdef TARGET_SPARC64
# define avail_32(C)      false
# define avail_64(C)      true
#else
# define avail_32(C)      true
# define avail_64(C)      false
#endif
2904 
2905 /* Default case for non jump instructions. */
/* Default case for non jump instructions. */
static bool advance_pc(DisasContext *dc)
{
    /* A low-bit-set npc is one of the symbolic DYNAMIC_PC/JUMP_PC/
       DYNAMIC_PC_LOOKUP markers, not a real (4-aligned) address.  */
    if (dc->npc & 3) {
        switch (dc->npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            dc->pc = dc->npc;
            gen_op_next_insn();
            break;
        case JUMP_PC:
            /* we can do a static jump */
            gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
            dc->base.is_jmp = DISAS_NORETURN;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Both pc and npc are statically known: step sequentially.  */
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
    return true;
}
2929 
2930 static bool advance_jump_uncond_never(DisasContext *dc, bool annul)
2931 {
2932     if (annul) {
2933         dc->pc = dc->npc + 4;
2934         dc->npc = dc->pc + 4;
2935     } else {
2936         dc->pc = dc->npc;
2937         dc->npc = dc->pc + 4;
2938     }
2939     return true;
2940 }
2941 
2942 static bool advance_jump_uncond_always(DisasContext *dc, bool annul,
2943                                        target_ulong dest)
2944 {
2945     if (annul) {
2946         dc->pc = dest;
2947         dc->npc = dest + 4;
2948     } else {
2949         dc->pc = dc->npc;
2950         dc->npc = dest;
2951         tcg_gen_mov_tl(cpu_pc, cpu_npc);
2952     }
2953     return true;
2954 }
2955 
2956 static bool advance_jump_cond(DisasContext *dc, bool annul, target_ulong dest)
2957 {
2958     if (annul) {
2959         gen_branch_a(dc, dest);
2960     } else {
2961         gen_branch_n(dc, dest);
2962     }
2963     return true;
2964 }
2965 
/* Branch on integer condition codes (Bicc and V9 BPcc).  */
static bool do_bpcc(DisasContext *dc, arg_bcc *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);
    DisasCompare cmp;

    switch (a->cond) {
    case 0x0:
        /* BN: branch never.  */
        return advance_jump_uncond_never(dc, a->a);
    case 0x8:
        /* BA: branch always.  */
        return advance_jump_uncond_always(dc, a->a, target);
    default:
        flush_cond(dc);

        /* Materialize the condition into cpu_cond for the branch.  */
        gen_compare(&cmp, a->cc, a->cond, dc);
        if (cmp.is_bool) {
            tcg_gen_mov_tl(cpu_cond, cmp.c1);
        } else {
            tcg_gen_setcond_tl(cmp.cond, cpu_cond, cmp.c1, cmp.c2);
        }
        return advance_jump_cond(dc, a->a, target);
    }
}

TRANS(Bicc, ALL, do_bpcc, a)
TRANS(BPcc,  64, do_bpcc, a)
2991 
/* Branch on floating-point condition codes (FBfcc and V9 FBPfcc).  */
static bool do_fbpfcc(DisasContext *dc, arg_bcc *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);

    /* FP branches trap when the FPU is disabled.  */
    if (gen_trap_ifnofpu(dc)) {
        return true;
    }
    switch (a->cond) {
    case 0x0:
        /* FBN: branch never.  */
        return advance_jump_uncond_never(dc, a->a);
    case 0x8:
        /* FBA: branch always.  */
        return advance_jump_uncond_always(dc, a->a, target);
    default:
        flush_cond(dc);
        gen_fcond(cpu_cond, a->cc, a->cond);
        return advance_jump_cond(dc, a->a, target);
    }
}

TRANS(FBPfcc,  64, do_fbpfcc, a)
TRANS(FBfcc,  ALL, do_fbpfcc, a)
3013 
/* V9 BPr: branch on the contents of integer register rs1.  */
static bool trans_BPr(DisasContext *dc, arg_BPr *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);
    DisasCompare cmp;

    if (!avail_64(dc)) {
        /* sparc64 only.  */
        return false;
    }
    if (gen_tcg_cond_reg[a->cond] == TCG_COND_NEVER) {
        /* Reserved condition encoding.  */
        return false;
    }

    flush_cond(dc);
    gen_compare_reg(&cmp, a->cond, gen_load_gpr(dc, a->rs1));
    tcg_gen_setcond_tl(cmp.cond, cpu_cond, cmp.c1, cmp.c2);
    return advance_jump_cond(dc, a->a, target);
}
3031 
/* CALL: save the current pc into %o7 (r15) and jump to the target.  */
static bool trans_CALL(DisasContext *dc, arg_CALL *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);

    gen_store_gpr(dc, 15, tcg_constant_tl(dc->pc));
    /* The delay slot at npc executes before control reaches the target.  */
    gen_mov_pc_npc(dc);
    dc->npc = target;
    return true;
}
3041 
3042 static bool trans_NCP(DisasContext *dc, arg_NCP *a)
3043 {
3044     /*
3045      * For sparc32, always generate the no-coprocessor exception.
3046      * For sparc64, always generate illegal instruction.
3047      */
3048 #ifdef TARGET_SPARC64
3049     return false;
3050 #else
3051     gen_exception(dc, TT_NCP_INSN);
3052     return true;
3053 #endif
3054 }
3055 
/*
 * Feature gates for the legacy decoder below: if the CPU model lacks the
 * named CPU_FEATURE_* bit, jump to the decoder's illegal-instruction or
 * no-FPU exception path.  Only usable inside disas_sparc_legacy, where
 * the illegal_insn / nfpu_insn labels are defined.
 */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3062 
3063 /* before an instruction, dc->pc must be static */
3064 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3065 {
3066     unsigned int opc, rs1, rs2, rd;
3067     TCGv cpu_src1, cpu_src2;
3068     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3069     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3070     target_long simm;
3071 
3072     opc = GET_FIELD(insn, 0, 1);
3073     rd = GET_FIELD(insn, 2, 6);
3074 
3075     switch (opc) {
3076     case 0:                     /* branches/sethi */
3077         {
3078             unsigned int xop = GET_FIELD(insn, 7, 9);
3079             switch (xop) {
3080 #ifdef TARGET_SPARC64
3081             case 0x1:           /* V9 BPcc */
3082                 g_assert_not_reached(); /* in decodetree */
3083             case 0x3:           /* V9 BPr */
3084                 g_assert_not_reached(); /* in decodetree */
3085             case 0x5:           /* V9 FBPcc */
3086                 g_assert_not_reached(); /* in decodetree */
3087 #else
3088             case 0x7:           /* CBN+x */
3089                 g_assert_not_reached(); /* in decodetree */
3090 #endif
3091             case 0x2:           /* BN+x */
3092                 g_assert_not_reached(); /* in decodetree */
3093             case 0x6:           /* FBN+x */
3094                 g_assert_not_reached(); /* in decodetree */
3095             case 0x4:           /* SETHI */
3096                 /* Special-case %g0 because that's the canonical nop.  */
3097                 if (rd) {
3098                     uint32_t value = GET_FIELD(insn, 10, 31);
3099                     TCGv t = gen_dest_gpr(dc, rd);
3100                     tcg_gen_movi_tl(t, value << 10);
3101                     gen_store_gpr(dc, rd, t);
3102                 }
3103                 break;
3104             case 0x0:           /* UNIMPL */
3105             default:
3106                 goto illegal_insn;
3107             }
3108             break;
3109         }
3110         break;
3111     case 1:
3112         g_assert_not_reached(); /* in decodetree */
3113     case 2:                     /* FPU & Logical Operations */
3114         {
3115             unsigned int xop = GET_FIELD(insn, 7, 12);
3116             TCGv cpu_dst = tcg_temp_new();
3117             TCGv cpu_tmp0;
3118 
3119             if (xop == 0x3a) {  /* generate trap */
3120                 int cond = GET_FIELD(insn, 3, 6);
3121                 TCGv_i32 trap;
3122                 TCGLabel *l1 = NULL;
3123                 int mask;
3124 
3125                 if (cond == 0) {
3126                     /* Trap never.  */
3127                     break;
3128                 }
3129 
3130                 save_state(dc);
3131 
3132                 if (cond != 8) {
3133                     /* Conditional trap.  */
3134                     DisasCompare cmp;
3135 #ifdef TARGET_SPARC64
3136                     /* V9 icc/xcc */
3137                     int cc = GET_FIELD_SP(insn, 11, 12);
3138                     if (cc == 0) {
3139                         gen_compare(&cmp, 0, cond, dc);
3140                     } else if (cc == 2) {
3141                         gen_compare(&cmp, 1, cond, dc);
3142                     } else {
3143                         goto illegal_insn;
3144                     }
3145 #else
3146                     gen_compare(&cmp, 0, cond, dc);
3147 #endif
3148                     l1 = gen_new_label();
3149                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3150                                       cmp.c1, cmp.c2, l1);
3151                 }
3152 
3153                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3154                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3155 
3156                 /* Don't use the normal temporaries, as they may well have
3157                    gone out of scope with the branch above.  While we're
3158                    doing that we might as well pre-truncate to 32-bit.  */
3159                 trap = tcg_temp_new_i32();
3160 
3161                 rs1 = GET_FIELD_SP(insn, 14, 18);
3162                 if (IS_IMM) {
3163                     rs2 = GET_FIELD_SP(insn, 0, 7);
3164                     if (rs1 == 0) {
3165                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3166                         /* Signal that the trap value is fully constant.  */
3167                         mask = 0;
3168                     } else {
3169                         TCGv t1 = gen_load_gpr(dc, rs1);
3170                         tcg_gen_trunc_tl_i32(trap, t1);
3171                         tcg_gen_addi_i32(trap, trap, rs2);
3172                     }
3173                 } else {
3174                     TCGv t1, t2;
3175                     rs2 = GET_FIELD_SP(insn, 0, 4);
3176                     t1 = gen_load_gpr(dc, rs1);
3177                     t2 = gen_load_gpr(dc, rs2);
3178                     tcg_gen_add_tl(t1, t1, t2);
3179                     tcg_gen_trunc_tl_i32(trap, t1);
3180                 }
3181                 if (mask != 0) {
3182                     tcg_gen_andi_i32(trap, trap, mask);
3183                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3184                 }
3185 
3186                 gen_helper_raise_exception(tcg_env, trap);
3187 
3188                 if (cond == 8) {
3189                     /* An unconditional trap ends the TB.  */
3190                     dc->base.is_jmp = DISAS_NORETURN;
3191                     goto jmp_insn;
3192                 } else {
3193                     /* A conditional trap falls through to the next insn.  */
3194                     gen_set_label(l1);
3195                     break;
3196                 }
3197             } else if (xop == 0x28) {
3198                 rs1 = GET_FIELD(insn, 13, 17);
3199                 switch(rs1) {
3200                 case 0: /* rdy */
3201 #ifndef TARGET_SPARC64
3202                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3203                                        manual, rdy on the microSPARC
3204                                        II */
3205                 case 0x0f:          /* stbar in the SPARCv8 manual,
3206                                        rdy on the microSPARC II */
3207                 case 0x10 ... 0x1f: /* implementation-dependent in the
3208                                        SPARCv8 manual, rdy on the
3209                                        microSPARC II */
3210                     /* Read Asr17 */
3211                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3212                         TCGv t = gen_dest_gpr(dc, rd);
3213                         /* Read Asr17 for a Leon3 monoprocessor */
3214                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3215                         gen_store_gpr(dc, rd, t);
3216                         break;
3217                     }
3218 #endif
3219                     gen_store_gpr(dc, rd, cpu_y);
3220                     break;
3221 #ifdef TARGET_SPARC64
3222                 case 0x2: /* V9 rdccr */
3223                     update_psr(dc);
3224                     gen_helper_rdccr(cpu_dst, tcg_env);
3225                     gen_store_gpr(dc, rd, cpu_dst);
3226                     break;
3227                 case 0x3: /* V9 rdasi */
3228                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3229                     gen_store_gpr(dc, rd, cpu_dst);
3230                     break;
3231                 case 0x4: /* V9 rdtick */
3232                     {
3233                         TCGv_ptr r_tickptr;
3234                         TCGv_i32 r_const;
3235 
3236                         r_tickptr = tcg_temp_new_ptr();
3237                         r_const = tcg_constant_i32(dc->mem_idx);
3238                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3239                                        offsetof(CPUSPARCState, tick));
3240                         if (translator_io_start(&dc->base)) {
3241                             dc->base.is_jmp = DISAS_EXIT;
3242                         }
3243                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3244                                                   r_const);
3245                         gen_store_gpr(dc, rd, cpu_dst);
3246                     }
3247                     break;
3248                 case 0x5: /* V9 rdpc */
3249                     {
3250                         TCGv t = gen_dest_gpr(dc, rd);
3251                         if (unlikely(AM_CHECK(dc))) {
3252                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3253                         } else {
3254                             tcg_gen_movi_tl(t, dc->pc);
3255                         }
3256                         gen_store_gpr(dc, rd, t);
3257                     }
3258                     break;
3259                 case 0x6: /* V9 rdfprs */
3260                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3261                     gen_store_gpr(dc, rd, cpu_dst);
3262                     break;
3263                 case 0xf: /* V9 membar */
3264                     break; /* no effect */
3265                 case 0x13: /* Graphics Status */
3266                     if (gen_trap_ifnofpu(dc)) {
3267                         goto jmp_insn;
3268                     }
3269                     gen_store_gpr(dc, rd, cpu_gsr);
3270                     break;
3271                 case 0x16: /* Softint */
3272                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3273                                      offsetof(CPUSPARCState, softint));
3274                     gen_store_gpr(dc, rd, cpu_dst);
3275                     break;
3276                 case 0x17: /* Tick compare */
3277                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3278                     break;
3279                 case 0x18: /* System tick */
3280                     {
3281                         TCGv_ptr r_tickptr;
3282                         TCGv_i32 r_const;
3283 
3284                         r_tickptr = tcg_temp_new_ptr();
3285                         r_const = tcg_constant_i32(dc->mem_idx);
3286                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3287                                        offsetof(CPUSPARCState, stick));
3288                         if (translator_io_start(&dc->base)) {
3289                             dc->base.is_jmp = DISAS_EXIT;
3290                         }
3291                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3292                                                   r_const);
3293                         gen_store_gpr(dc, rd, cpu_dst);
3294                     }
3295                     break;
3296                 case 0x19: /* System tick compare */
3297                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3298                     break;
3299                 case 0x1a: /* UltraSPARC-T1 Strand status */
3300                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3301                      * this ASR as impl. dep
3302                      */
3303                     CHECK_IU_FEATURE(dc, HYPV);
3304                     {
3305                         TCGv t = gen_dest_gpr(dc, rd);
3306                         tcg_gen_movi_tl(t, 1UL);
3307                         gen_store_gpr(dc, rd, t);
3308                     }
3309                     break;
3310                 case 0x10: /* Performance Control */
3311                 case 0x11: /* Performance Instrumentation Counter */
3312                 case 0x12: /* Dispatch Control */
3313                 case 0x14: /* Softint set, WO */
3314                 case 0x15: /* Softint clear, WO */
3315 #endif
3316                 default:
3317                     goto illegal_insn;
3318                 }
3319 #if !defined(CONFIG_USER_ONLY)
3320             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3321 #ifndef TARGET_SPARC64
3322                 if (!supervisor(dc)) {
3323                     goto priv_insn;
3324                 }
3325                 update_psr(dc);
3326                 gen_helper_rdpsr(cpu_dst, tcg_env);
3327 #else
3328                 CHECK_IU_FEATURE(dc, HYPV);
3329                 if (!hypervisor(dc))
3330                     goto priv_insn;
3331                 rs1 = GET_FIELD(insn, 13, 17);
3332                 switch (rs1) {
3333                 case 0: // hpstate
3334                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3335                                    offsetof(CPUSPARCState, hpstate));
3336                     break;
3337                 case 1: // htstate
3338                     // gen_op_rdhtstate();
3339                     break;
3340                 case 3: // hintp
3341                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3342                     break;
3343                 case 5: // htba
3344                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3345                     break;
3346                 case 6: // hver
3347                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3348                     break;
3349                 case 31: // hstick_cmpr
3350                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3351                     break;
3352                 default:
3353                     goto illegal_insn;
3354                 }
3355 #endif
3356                 gen_store_gpr(dc, rd, cpu_dst);
3357                 break;
3358             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3359                 if (!supervisor(dc)) {
3360                     goto priv_insn;
3361                 }
3362                 cpu_tmp0 = tcg_temp_new();
3363 #ifdef TARGET_SPARC64
3364                 rs1 = GET_FIELD(insn, 13, 17);
3365                 switch (rs1) {
3366                 case 0: // tpc
3367                     {
3368                         TCGv_ptr r_tsptr;
3369 
3370                         r_tsptr = tcg_temp_new_ptr();
3371                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3372                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3373                                       offsetof(trap_state, tpc));
3374                     }
3375                     break;
3376                 case 1: // tnpc
3377                     {
3378                         TCGv_ptr r_tsptr;
3379 
3380                         r_tsptr = tcg_temp_new_ptr();
3381                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3382                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3383                                       offsetof(trap_state, tnpc));
3384                     }
3385                     break;
3386                 case 2: // tstate
3387                     {
3388                         TCGv_ptr r_tsptr;
3389 
3390                         r_tsptr = tcg_temp_new_ptr();
3391                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3392                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3393                                       offsetof(trap_state, tstate));
3394                     }
3395                     break;
3396                 case 3: // tt
3397                     {
3398                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3399 
3400                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3401                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3402                                          offsetof(trap_state, tt));
3403                     }
3404                     break;
3405                 case 4: // tick
3406                     {
3407                         TCGv_ptr r_tickptr;
3408                         TCGv_i32 r_const;
3409 
3410                         r_tickptr = tcg_temp_new_ptr();
3411                         r_const = tcg_constant_i32(dc->mem_idx);
3412                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3413                                        offsetof(CPUSPARCState, tick));
3414                         if (translator_io_start(&dc->base)) {
3415                             dc->base.is_jmp = DISAS_EXIT;
3416                         }
3417                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3418                                                   r_tickptr, r_const);
3419                     }
3420                     break;
3421                 case 5: // tba
3422                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3423                     break;
3424                 case 6: // pstate
3425                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3426                                      offsetof(CPUSPARCState, pstate));
3427                     break;
3428                 case 7: // tl
3429                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3430                                      offsetof(CPUSPARCState, tl));
3431                     break;
3432                 case 8: // pil
3433                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3434                                      offsetof(CPUSPARCState, psrpil));
3435                     break;
3436                 case 9: // cwp
3437                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3438                     break;
3439                 case 10: // cansave
3440                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3441                                      offsetof(CPUSPARCState, cansave));
3442                     break;
3443                 case 11: // canrestore
3444                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3445                                      offsetof(CPUSPARCState, canrestore));
3446                     break;
3447                 case 12: // cleanwin
3448                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3449                                      offsetof(CPUSPARCState, cleanwin));
3450                     break;
3451                 case 13: // otherwin
3452                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3453                                      offsetof(CPUSPARCState, otherwin));
3454                     break;
3455                 case 14: // wstate
3456                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3457                                      offsetof(CPUSPARCState, wstate));
3458                     break;
3459                 case 16: // UA2005 gl
3460                     CHECK_IU_FEATURE(dc, GL);
3461                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3462                                      offsetof(CPUSPARCState, gl));
3463                     break;
3464                 case 26: // UA2005 strand status
3465                     CHECK_IU_FEATURE(dc, HYPV);
3466                     if (!hypervisor(dc))
3467                         goto priv_insn;
3468                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3469                     break;
3470                 case 31: // ver
3471                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3472                     break;
3473                 case 15: // fq
3474                 default:
3475                     goto illegal_insn;
3476                 }
3477 #else
3478                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3479 #endif
3480                 gen_store_gpr(dc, rd, cpu_tmp0);
3481                 break;
3482 #endif
3483 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3484             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3485 #ifdef TARGET_SPARC64
3486                 gen_helper_flushw(tcg_env);
3487 #else
3488                 if (!supervisor(dc))
3489                     goto priv_insn;
3490                 gen_store_gpr(dc, rd, cpu_tbr);
3491 #endif
3492                 break;
3493 #endif
3494             } else if (xop == 0x34) {   /* FPU Operations */
3495                 if (gen_trap_ifnofpu(dc)) {
3496                     goto jmp_insn;
3497                 }
3498                 gen_op_clear_ieee_excp_and_FTT();
3499                 rs1 = GET_FIELD(insn, 13, 17);
3500                 rs2 = GET_FIELD(insn, 27, 31);
3501                 xop = GET_FIELD(insn, 18, 26);
3502 
3503                 switch (xop) {
3504                 case 0x1: /* fmovs */
3505                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3506                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3507                     break;
3508                 case 0x5: /* fnegs */
3509                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3510                     break;
3511                 case 0x9: /* fabss */
3512                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3513                     break;
3514                 case 0x29: /* fsqrts */
3515                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3516                     break;
3517                 case 0x2a: /* fsqrtd */
3518                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3519                     break;
3520                 case 0x2b: /* fsqrtq */
3521                     CHECK_FPU_FEATURE(dc, FLOAT128);
3522                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3523                     break;
3524                 case 0x41: /* fadds */
3525                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3526                     break;
3527                 case 0x42: /* faddd */
3528                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3529                     break;
3530                 case 0x43: /* faddq */
3531                     CHECK_FPU_FEATURE(dc, FLOAT128);
3532                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3533                     break;
3534                 case 0x45: /* fsubs */
3535                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3536                     break;
3537                 case 0x46: /* fsubd */
3538                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3539                     break;
3540                 case 0x47: /* fsubq */
3541                     CHECK_FPU_FEATURE(dc, FLOAT128);
3542                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3543                     break;
3544                 case 0x49: /* fmuls */
3545                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3546                     break;
3547                 case 0x4a: /* fmuld */
3548                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3549                     break;
3550                 case 0x4b: /* fmulq */
3551                     CHECK_FPU_FEATURE(dc, FLOAT128);
3552                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3553                     break;
3554                 case 0x4d: /* fdivs */
3555                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3556                     break;
3557                 case 0x4e: /* fdivd */
3558                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3559                     break;
3560                 case 0x4f: /* fdivq */
3561                     CHECK_FPU_FEATURE(dc, FLOAT128);
3562                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3563                     break;
3564                 case 0x69: /* fsmuld */
3565                     CHECK_FPU_FEATURE(dc, FSMULD);
3566                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3567                     break;
3568                 case 0x6e: /* fdmulq */
3569                     CHECK_FPU_FEATURE(dc, FLOAT128);
3570                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3571                     break;
3572                 case 0xc4: /* fitos */
3573                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3574                     break;
3575                 case 0xc6: /* fdtos */
3576                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3577                     break;
3578                 case 0xc7: /* fqtos */
3579                     CHECK_FPU_FEATURE(dc, FLOAT128);
3580                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3581                     break;
3582                 case 0xc8: /* fitod */
3583                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3584                     break;
3585                 case 0xc9: /* fstod */
3586                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3587                     break;
3588                 case 0xcb: /* fqtod */
3589                     CHECK_FPU_FEATURE(dc, FLOAT128);
3590                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3591                     break;
3592                 case 0xcc: /* fitoq */
3593                     CHECK_FPU_FEATURE(dc, FLOAT128);
3594                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3595                     break;
3596                 case 0xcd: /* fstoq */
3597                     CHECK_FPU_FEATURE(dc, FLOAT128);
3598                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3599                     break;
3600                 case 0xce: /* fdtoq */
3601                     CHECK_FPU_FEATURE(dc, FLOAT128);
3602                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3603                     break;
3604                 case 0xd1: /* fstoi */
3605                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3606                     break;
3607                 case 0xd2: /* fdtoi */
3608                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3609                     break;
3610                 case 0xd3: /* fqtoi */
3611                     CHECK_FPU_FEATURE(dc, FLOAT128);
3612                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3613                     break;
3614 #ifdef TARGET_SPARC64
3615                 case 0x2: /* V9 fmovd */
3616                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3617                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3618                     break;
3619                 case 0x3: /* V9 fmovq */
3620                     CHECK_FPU_FEATURE(dc, FLOAT128);
3621                     gen_move_Q(dc, rd, rs2);
3622                     break;
3623                 case 0x6: /* V9 fnegd */
3624                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3625                     break;
3626                 case 0x7: /* V9 fnegq */
3627                     CHECK_FPU_FEATURE(dc, FLOAT128);
3628                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3629                     break;
3630                 case 0xa: /* V9 fabsd */
3631                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3632                     break;
3633                 case 0xb: /* V9 fabsq */
3634                     CHECK_FPU_FEATURE(dc, FLOAT128);
3635                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3636                     break;
3637                 case 0x81: /* V9 fstox */
3638                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3639                     break;
3640                 case 0x82: /* V9 fdtox */
3641                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3642                     break;
3643                 case 0x83: /* V9 fqtox */
3644                     CHECK_FPU_FEATURE(dc, FLOAT128);
3645                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3646                     break;
3647                 case 0x84: /* V9 fxtos */
3648                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3649                     break;
3650                 case 0x88: /* V9 fxtod */
3651                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3652                     break;
3653                 case 0x8c: /* V9 fxtoq */
3654                     CHECK_FPU_FEATURE(dc, FLOAT128);
3655                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3656                     break;
3657 #endif
3658                 default:
3659                     goto illegal_insn;
3660                 }
3661             } else if (xop == 0x35) {   /* FPU Operations */
3662 #ifdef TARGET_SPARC64
3663                 int cond;
3664 #endif
3665                 if (gen_trap_ifnofpu(dc)) {
3666                     goto jmp_insn;
3667                 }
3668                 gen_op_clear_ieee_excp_and_FTT();
3669                 rs1 = GET_FIELD(insn, 13, 17);
3670                 rs2 = GET_FIELD(insn, 27, 31);
3671                 xop = GET_FIELD(insn, 18, 26);
3672 
3673 #ifdef TARGET_SPARC64
3674 #define FMOVR(sz)                                                  \
3675                 do {                                               \
3676                     DisasCompare cmp;                              \
3677                     cond = GET_FIELD_SP(insn, 10, 12);             \
3678                     cpu_src1 = get_src1(dc, insn);                 \
3679                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3680                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3681                 } while (0)
3682 
3683                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3684                     FMOVR(s);
3685                     break;
3686                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3687                     FMOVR(d);
3688                     break;
3689                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3690                     CHECK_FPU_FEATURE(dc, FLOAT128);
3691                     FMOVR(q);
3692                     break;
3693                 }
3694 #undef FMOVR
3695 #endif
                /*
                 * Second decode of xop: V9 conditional FP-register moves
                 * (on FP or integer condition codes) and the FP compares.
                 */
                switch (xop) {
#ifdef TARGET_SPARC64
/*
 * Emit a conditional FP move keyed on FP condition-code field %fcc<fcc>.
 * The condition is taken from insn bits 17..14; sz selects the operand
 * width (s = single, d = double, q = quad).
 */
#define FMOVCC(fcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcompare(&cmp, fcc, cond);                  \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    /* Quad forms additionally require the FLOAT128 feature. */
                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVCC(0, s);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVCC(0, d);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVCC(1, s);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVCC(1, d);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVCC(2, s);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVCC(2, d);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(2, q);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVCC(3, s);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVCC(3, d);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(3, q);
                        break;
#undef FMOVCC
/*
 * Same shape as above, but keyed on the integer condition codes via
 * gen_compare(): xcc = 0 selects %icc, xcc = 1 selects %xcc (see the
 * per-case comments below).
 */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVCC(0, s);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVCC(0, d);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVCC(1, s);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVCC(1, d);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
#undef FMOVCC
#endif
                    /*
                     * FP compares.  rd & 3 selects the destination %fcc
                     * field.  The fcmpe* forms are the signaling variants;
                     * the behavioral difference lives in the helpers.
                     * Quad compares go through the QT0/QT1 staging slots.
                     */
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                /*
                 * or: also the canonical encoding of the mov/clr synthetic
                 * instructions, so the %g0 special cases below avoid
                 * emitting a real OR when one operand is the zero register.
                 */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                /*
                 * Shift group on 64-bit CPUs.  insn bit 12 distinguishes
                 * the extended form (sllx/srlx/srax, 6-bit shift count,
                 * mask 0x3f) from the 32-bit form (5-bit count, mask 0x1f).
                 */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /*
                         * 32-bit sll: low bits of the left shift are the
                         * same either way, so no extension is needed here.
                         */
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                /* 32-bit srl shifts the zero-extended low 32 bits of rs1. */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                /* 32-bit sra shifts the sign-extended low 32 bits of rs1. */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    /*
                     * Two-operand ALU group.  Bit 4 of xop (0x10) selects
                     * the condition-code-setting variant (addcc, andcc,
                     * ...); the switch masks it off and tests it per-case.
                     * Flag generation is lazy: for the logic ops only the
                     * result is saved in cpu_cc_dst and cc_op is set to
                     * CC_OP_LOGIC so the flags can be reconstructed later;
                     * add/sub go through gen_op_*_cc which capture their
                     * operands as well.
                     */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x1: /* and */
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x5: /* andn */
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        /* Add-with-carry; the cc flag is passed down. */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        /* Helper-based: may raise a division exception. */
                        gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            /*
                             * NOTE(review): no tcg_gen_movi_i32(cpu_cc_op)
                             * here, unlike add/sub above -- presumably the
                             * *_cc helper updates the flag state in env
                             * itself; confirm against the helper.
                             */
                            gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    /* All paths above produce their result in cpu_dst. */
                    gen_store_gpr(dc, rd, cpu_dst);
                } else {
                    /*
                     * xop >= 0x20: tagged arithmetic, multiply step, and
                     * (pre-V9) the 32-bit shift instructions.
                     */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        /*
                         * The trapping variants are implemented entirely in
                         * helpers (which can raise an exception); only
                         * dc->cc_op is updated here, no cpu_cc_op move --
                         * presumably the helper maintains the env-side
                         * flag state.
                         */
                        gen_helper_taddcctv(cpu_dst, tcg_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        gen_helper_tsubcctv(cpu_dst, tcg_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        /*
                         * Multiply step reads the current flags, so force
                         * any lazily-deferred PSR computation first.
                         */
                        update_psr(dc);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    /* Pre-V9 shifts: the count is always 5 bits. */
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        /*
                         * wr %y / WRASR family: rd selects the target state
                         * register.  The value written is rs1 ^ (rs2|imm),
                         * matching the SPARC wr definition, hence the xor
                         * on each sub-case.
                         */
                        {
                            cpu_tmp0 = tcg_temp_new();
                            switch(rd) {
                            case 0: /* wry */
                                /* %y holds only 32 significant bits. */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                if ((rd == 0x13) && (dc->def->features &
                                                     CPU_FEATURE_POWERDOWN)) {
                                    /* LEON3 power-down */
                                    save_state(dc);
                                    gen_helper_power_down(tcg_env);
                                }
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                /* Helper stores the flags; switch to
                                   non-lazy CC_OP_FLAGS afterwards. */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_wrccr(tcg_env, cpu_tmp0);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState, asi));
                                /*
                                 * End TB to notice changed ASI.
                                 * TODO: Could notice src1 = %g0 and IS_IMM,
                                 * update DisasContext and not exit the TB.
                                 */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_lookup_and_goto_ptr();
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0x6: /* V9 wrfprs */
                                /* End the TB: later translation decisions
                                   may depend on the new FPRS value. */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                dc->fprs_dirty = 0;
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_set_softint(tcg_env, cpu_tmp0);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(tcg_env, cpu_tmp0);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_write_softint(tcg_env, cpu_tmp0);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* Timer access: mark I/O for icount. */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, stick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, stick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        /*
                         * Privileged.  On V9 this encoding is reused: rd
                         * selects the register-window state ops (saved /
                         * restored); on pre-V9 it writes the PSR.
                         */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0:
                                gen_helper_saved(tcg_env);
                                break;
                            case 1:
                                gen_helper_restored(tcg_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /*
                             * wrpsr = psr := rs1 ^ (rs2|imm), via helper.
                             * Flags become concrete (CC_OP_FLAGS) and the
                             * TB is ended -- later translation may depend
                             * on the new PSR contents.
                             */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(tcg_env, cpu_tmp0);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* The value written is rs1 XOR rs2, per the
                               SPARC wr/wrpr instruction definition.  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 WRPR: rd selects the privileged register.  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    /* Store into the trap state for the
                                       current trap level (env->tl).  */
                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    /* tt is a 32-bit field.  */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* PSTATE affects how subsequent insns are
                                   translated, so make the npc dynamic and
                                   (if in an icount/io context) exit the TB.  */
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                /* Changing TL changes which trap_state the
                                   t* registers reference.  */
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* Lowering PIL may unmask a pending irq.  */
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                /* Hypervisor-only register.  */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 WRWIM: mask off bits beyond the number of
                               implemented register windows.  */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8 WRTBR: privileged; value is rs1 XOR rs2.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 WRHPR: hypervisor privilege required;
                               rd selects the hyper-privileged register.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* HPSTATE affects execution mode: end the TB
                                   and return to the main loop.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Update the compare register and program
                                       the hstick timer with the new limit.  */
                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
4503 #endif
4504 #ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            if (insn & (1 << 18)) {
                                /* Integer condition codes: cc selects
                                   icc (0) or xcc (2); 1 and 3 are illegal.  */
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                /* Floating-point condition codes fcc0-3.  */
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* Conditionally replace rd with src2; the old
                               rd value is kept when the condition is false.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* Helper raises the divide-by-zero trap as needed.  */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of src2 only (rs1 is unused).  */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Condition is a register test on rs1
                               (MOVRZ, MOVRNZ, ...), not a cc test.  */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* Conditionally replace rd with src2, keeping
                               the old value when the test fails.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4570 #endif
4571                     default:
4572                         goto illegal_insn;
4573                     }
4574                 }
4575             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4576 #ifdef TARGET_SPARC64
4577                 int opf = GET_FIELD_SP(insn, 5, 13);
4578                 rs1 = GET_FIELD(insn, 13, 17);
4579                 rs2 = GET_FIELD(insn, 27, 31);
4580                 if (gen_trap_ifnofpu(dc)) {
4581                     goto jmp_insn;
4582                 }
4583 
4584                 switch (opf) {
                /* VIS edge instructions.  All variants share gen_edge();
                   from the opcode naming, the trailing arguments appear to
                   select (element width, cc-updating variant, little-endian
                   variant) -- NOTE(review): confirm against gen_edge().  */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array16 is array8 scaled by the 2-byte element size.  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array32 is array8 scaled by the 4-byte element size.  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* Little-endian variant: last arg selects it.  */
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* rd = rs1 + rs2; the sum is also deposited into the
                       upper 32 bits of GSR (the bshuffle mask field).  */
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS partitioned compares: compare double-register FP
                   operands elementwise and write the result mask to an
                   integer register rd.  */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS partitioned multiplies: gen_ne_fop_DDD emits a
                   double = op(double, double) operation on FP registers.  */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* gen_gsr_fop_DDD: like DDD but the op also reads GSR
                       (scale factor for the pack).  */
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Source is rs2 only; result is a single 32-bit F reg.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* DDDD form: pdist accumulates into rd, so the old rd
                       value is an input as well.  */
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Uses the GSR align field (gen_gsr_fop_DDD passes GSR
                       to the op).  */
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    /* Byte shuffle controlled by the GSR mask field.  */
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                /* VIS partitioned add/subtract.  The 's' forms operate on a
                   single 32-bit FP register (FFF); note that the 32-bit
                   element variants reduce to plain i32 add/sub.  */
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                /* VIS logical operations, expanded directly to host TCG
                   bitwise ops on the FP registers (no helper needed).  */
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* andc(a, b) == a & ~b, i.e. rs1 & ~rs2.  */
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Operand order deliberately swapped: rs2 & ~rs1.  */
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
                    break;
4932                 case 0x06f: /* VIS I fnands */
4933                     CHECK_FPU_FEATURE(dc, VIS1);
4934                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4935                     break;
4936                 case 0x070: /* VIS I fand */
4937                     CHECK_FPU_FEATURE(dc, VIS1);
4938                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4939                     break;
4940                 case 0x071: /* VIS I fands */
4941                     CHECK_FPU_FEATURE(dc, VIS1);
4942                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4943                     break;
4944                 case 0x072: /* VIS I fxnor */
4945                     CHECK_FPU_FEATURE(dc, VIS1);
4946                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4947                     break;
4948                 case 0x073: /* VIS I fxnors */
4949                     CHECK_FPU_FEATURE(dc, VIS1);
4950                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4951                     break;
4952                 case 0x074: /* VIS I fsrc1 */
4953                     CHECK_FPU_FEATURE(dc, VIS1);
4954                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4955                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4956                     break;
4957                 case 0x075: /* VIS I fsrc1s */
4958                     CHECK_FPU_FEATURE(dc, VIS1);
4959                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4960                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4961                     break;
4962                 case 0x076: /* VIS I fornot2 */
4963                     CHECK_FPU_FEATURE(dc, VIS1);
4964                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4965                     break;
4966                 case 0x077: /* VIS I fornot2s */
4967                     CHECK_FPU_FEATURE(dc, VIS1);
4968                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4969                     break;
4970                 case 0x078: /* VIS I fsrc2 */
4971                     CHECK_FPU_FEATURE(dc, VIS1);
4972                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4973                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4974                     break;
4975                 case 0x079: /* VIS I fsrc2s */
4976                     CHECK_FPU_FEATURE(dc, VIS1);
4977                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4978                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4979                     break;
4980                 case 0x07a: /* VIS I fornot1 */
4981                     CHECK_FPU_FEATURE(dc, VIS1);
4982                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4983                     break;
4984                 case 0x07b: /* VIS I fornot1s */
4985                     CHECK_FPU_FEATURE(dc, VIS1);
4986                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4987                     break;
4988                 case 0x07c: /* VIS I for */
4989                     CHECK_FPU_FEATURE(dc, VIS1);
4990                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4991                     break;
4992                 case 0x07d: /* VIS I fors */
4993                     CHECK_FPU_FEATURE(dc, VIS1);
4994                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4995                     break;
4996                 case 0x07e: /* VIS I fone */
4997                     CHECK_FPU_FEATURE(dc, VIS1);
4998                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4999                     tcg_gen_movi_i64(cpu_dst_64, -1);
5000                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5001                     break;
5002                 case 0x07f: /* VIS I fones */
5003                     CHECK_FPU_FEATURE(dc, VIS1);
5004                     cpu_dst_32 = gen_dest_fpr_F(dc);
5005                     tcg_gen_movi_i32(cpu_dst_32, -1);
5006                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5007                     break;
5008                 case 0x080: /* VIS I shutdown */
5009                 case 0x081: /* VIS II siam */
5010                     // XXX
5011                     goto illegal_insn;
5012                 default:
5013                     goto illegal_insn;
5014                 }
5015 #else
5016                 goto ncp_insn;
5017 #endif
5018             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5019 #ifdef TARGET_SPARC64
5020                 goto illegal_insn;
5021 #else
5022                 goto ncp_insn;
5023 #endif
5024 #ifdef TARGET_SPARC64
5025             } else if (xop == 0x39) { /* V9 return */
5026                 save_state(dc);
5027                 cpu_src1 = get_src1(dc, insn);
5028                 cpu_tmp0 = tcg_temp_new();
5029                 if (IS_IMM) {   /* immediate */
5030                     simm = GET_FIELDs(insn, 19, 31);
5031                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5032                 } else {                /* register */
5033                     rs2 = GET_FIELD(insn, 27, 31);
5034                     if (rs2) {
5035                         cpu_src2 = gen_load_gpr(dc, rs2);
5036                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5037                     } else {
5038                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5039                     }
5040                 }
5041                 gen_check_align(dc, cpu_tmp0, 3);
5042                 gen_helper_restore(tcg_env);
5043                 gen_mov_pc_npc(dc);
5044                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5045                 dc->npc = DYNAMIC_PC_LOOKUP;
5046                 goto jmp_insn;
5047 #endif
5048             } else {
5049                 cpu_src1 = get_src1(dc, insn);
5050                 cpu_tmp0 = tcg_temp_new();
5051                 if (IS_IMM) {   /* immediate */
5052                     simm = GET_FIELDs(insn, 19, 31);
5053                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5054                 } else {                /* register */
5055                     rs2 = GET_FIELD(insn, 27, 31);
5056                     if (rs2) {
5057                         cpu_src2 = gen_load_gpr(dc, rs2);
5058                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5059                     } else {
5060                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5061                     }
5062                 }
5063                 switch (xop) {
5064                 case 0x38:      /* jmpl */
5065                     {
5066                         gen_check_align(dc, cpu_tmp0, 3);
5067                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5068                         gen_mov_pc_npc(dc);
5069                         gen_address_mask(dc, cpu_tmp0);
5070                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5071                         dc->npc = DYNAMIC_PC_LOOKUP;
5072                     }
5073                     goto jmp_insn;
5074 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5075                 case 0x39:      /* rett, V9 return */
5076                     {
5077                         if (!supervisor(dc))
5078                             goto priv_insn;
5079                         gen_check_align(dc, cpu_tmp0, 3);
5080                         gen_mov_pc_npc(dc);
5081                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5082                         dc->npc = DYNAMIC_PC;
5083                         gen_helper_rett(tcg_env);
5084                     }
5085                     goto jmp_insn;
5086 #endif
5087                 case 0x3b: /* flush */
5088                     /* nop */
5089                     break;
5090                 case 0x3c:      /* save */
5091                     gen_helper_save(tcg_env);
5092                     gen_store_gpr(dc, rd, cpu_tmp0);
5093                     break;
5094                 case 0x3d:      /* restore */
5095                     gen_helper_restore(tcg_env);
5096                     gen_store_gpr(dc, rd, cpu_tmp0);
5097                     break;
5098 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5099                 case 0x3e:      /* V9 done/retry */
5100                     {
5101                         switch (rd) {
5102                         case 0:
5103                             if (!supervisor(dc))
5104                                 goto priv_insn;
5105                             dc->npc = DYNAMIC_PC;
5106                             dc->pc = DYNAMIC_PC;
5107                             translator_io_start(&dc->base);
5108                             gen_helper_done(tcg_env);
5109                             goto jmp_insn;
5110                         case 1:
5111                             if (!supervisor(dc))
5112                                 goto priv_insn;
5113                             dc->npc = DYNAMIC_PC;
5114                             dc->pc = DYNAMIC_PC;
5115                             translator_io_start(&dc->base);
5116                             gen_helper_retry(tcg_env);
5117                             goto jmp_insn;
5118                         default:
5119                             goto illegal_insn;
5120                         }
5121                     }
5122                     break;
5123 #endif
5124                 default:
5125                     goto illegal_insn;
5126                 }
5127             }
5128             break;
5129         }
5130         break;
5131     case 3:                     /* load/store instructions */
5132         {
5133             unsigned int xop = GET_FIELD(insn, 7, 12);
5134             /* ??? gen_address_mask prevents us from using a source
5135                register directly.  Always generate a temporary.  */
5136             TCGv cpu_addr = tcg_temp_new();
5137 
5138             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5139             if (xop == 0x3c || xop == 0x3e) {
5140                 /* V9 casa/casxa : no offset */
5141             } else if (IS_IMM) {     /* immediate */
5142                 simm = GET_FIELDs(insn, 19, 31);
5143                 if (simm != 0) {
5144                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5145                 }
5146             } else {            /* register */
5147                 rs2 = GET_FIELD(insn, 27, 31);
5148                 if (rs2 != 0) {
5149                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5150                 }
5151             }
5152             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5153                 (xop > 0x17 && xop <= 0x1d ) ||
5154                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5155                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5156 
5157                 switch (xop) {
5158                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5159                     gen_address_mask(dc, cpu_addr);
5160                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5161                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5162                     break;
5163                 case 0x1:       /* ldub, load unsigned byte */
5164                     gen_address_mask(dc, cpu_addr);
5165                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5166                                        dc->mem_idx, MO_UB);
5167                     break;
5168                 case 0x2:       /* lduh, load unsigned halfword */
5169                     gen_address_mask(dc, cpu_addr);
5170                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5171                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5172                     break;
5173                 case 0x3:       /* ldd, load double word */
5174                     if (rd & 1)
5175                         goto illegal_insn;
5176                     else {
5177                         TCGv_i64 t64;
5178 
5179                         gen_address_mask(dc, cpu_addr);
5180                         t64 = tcg_temp_new_i64();
5181                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5182                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5183                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5184                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5185                         gen_store_gpr(dc, rd + 1, cpu_val);
5186                         tcg_gen_shri_i64(t64, t64, 32);
5187                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5188                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5189                     }
5190                     break;
5191                 case 0x9:       /* ldsb, load signed byte */
5192                     gen_address_mask(dc, cpu_addr);
5193                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5194                     break;
5195                 case 0xa:       /* ldsh, load signed halfword */
5196                     gen_address_mask(dc, cpu_addr);
5197                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5198                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5199                     break;
5200                 case 0xd:       /* ldstub */
5201                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5202                     break;
5203                 case 0x0f:
5204                     /* swap, swap register with memory. Also atomically */
5205                     cpu_src1 = gen_load_gpr(dc, rd);
5206                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5207                              dc->mem_idx, MO_TEUL);
5208                     break;
5209 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5210                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5211                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5212                     break;
5213                 case 0x11:      /* lduba, load unsigned byte alternate */
5214                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5215                     break;
5216                 case 0x12:      /* lduha, load unsigned halfword alternate */
5217                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5218                     break;
5219                 case 0x13:      /* ldda, load double word alternate */
5220                     if (rd & 1) {
5221                         goto illegal_insn;
5222                     }
5223                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5224                     goto skip_move;
5225                 case 0x19:      /* ldsba, load signed byte alternate */
5226                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5227                     break;
5228                 case 0x1a:      /* ldsha, load signed halfword alternate */
5229                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5230                     break;
5231                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5232                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5233                     break;
5234                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5235                                    atomically */
5236                     cpu_src1 = gen_load_gpr(dc, rd);
5237                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5238                     break;
5239 
5240 #ifndef TARGET_SPARC64
5241                 case 0x30: /* ldc */
5242                 case 0x31: /* ldcsr */
5243                 case 0x33: /* lddc */
5244                     goto ncp_insn;
5245 #endif
5246 #endif
5247 #ifdef TARGET_SPARC64
5248                 case 0x08: /* V9 ldsw */
5249                     gen_address_mask(dc, cpu_addr);
5250                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5251                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5252                     break;
5253                 case 0x0b: /* V9 ldx */
5254                     gen_address_mask(dc, cpu_addr);
5255                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5256                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5257                     break;
5258                 case 0x18: /* V9 ldswa */
5259                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5260                     break;
5261                 case 0x1b: /* V9 ldxa */
5262                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5263                     break;
5264                 case 0x2d: /* V9 prefetch, no effect */
5265                     goto skip_move;
5266                 case 0x30: /* V9 ldfa */
5267                     if (gen_trap_ifnofpu(dc)) {
5268                         goto jmp_insn;
5269                     }
5270                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5271                     gen_update_fprs_dirty(dc, rd);
5272                     goto skip_move;
5273                 case 0x33: /* V9 lddfa */
5274                     if (gen_trap_ifnofpu(dc)) {
5275                         goto jmp_insn;
5276                     }
5277                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5278                     gen_update_fprs_dirty(dc, DFPREG(rd));
5279                     goto skip_move;
5280                 case 0x3d: /* V9 prefetcha, no effect */
5281                     goto skip_move;
5282                 case 0x32: /* V9 ldqfa */
5283                     CHECK_FPU_FEATURE(dc, FLOAT128);
5284                     if (gen_trap_ifnofpu(dc)) {
5285                         goto jmp_insn;
5286                     }
5287                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5288                     gen_update_fprs_dirty(dc, QFPREG(rd));
5289                     goto skip_move;
5290 #endif
5291                 default:
5292                     goto illegal_insn;
5293                 }
5294                 gen_store_gpr(dc, rd, cpu_val);
5295 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5296             skip_move: ;
5297 #endif
5298             } else if (xop >= 0x20 && xop < 0x24) {
5299                 if (gen_trap_ifnofpu(dc)) {
5300                     goto jmp_insn;
5301                 }
5302                 switch (xop) {
5303                 case 0x20:      /* ldf, load fpreg */
5304                     gen_address_mask(dc, cpu_addr);
5305                     cpu_dst_32 = gen_dest_fpr_F(dc);
5306                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5307                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5308                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5309                     break;
5310                 case 0x21:      /* ldfsr, V9 ldxfsr */
5311 #ifdef TARGET_SPARC64
5312                     gen_address_mask(dc, cpu_addr);
5313                     if (rd == 1) {
5314                         TCGv_i64 t64 = tcg_temp_new_i64();
5315                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5316                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5317                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5318                         break;
5319                     }
5320 #endif
5321                     cpu_dst_32 = tcg_temp_new_i32();
5322                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5323                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5324                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5325                     break;
5326                 case 0x22:      /* ldqf, load quad fpreg */
5327                     CHECK_FPU_FEATURE(dc, FLOAT128);
5328                     gen_address_mask(dc, cpu_addr);
5329                     cpu_src1_64 = tcg_temp_new_i64();
5330                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5331                                         MO_TEUQ | MO_ALIGN_4);
5332                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5333                     cpu_src2_64 = tcg_temp_new_i64();
5334                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5335                                         MO_TEUQ | MO_ALIGN_4);
5336                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5337                     break;
5338                 case 0x23:      /* lddf, load double fpreg */
5339                     gen_address_mask(dc, cpu_addr);
5340                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5341                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5342                                         MO_TEUQ | MO_ALIGN_4);
5343                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5344                     break;
5345                 default:
5346                     goto illegal_insn;
5347                 }
5348             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5349                        xop == 0xe || xop == 0x1e) {
5350                 TCGv cpu_val = gen_load_gpr(dc, rd);
5351 
5352                 switch (xop) {
5353                 case 0x4: /* st, store word */
5354                     gen_address_mask(dc, cpu_addr);
5355                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5356                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5357                     break;
5358                 case 0x5: /* stb, store byte */
5359                     gen_address_mask(dc, cpu_addr);
5360                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5361                     break;
5362                 case 0x6: /* sth, store halfword */
5363                     gen_address_mask(dc, cpu_addr);
5364                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5365                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5366                     break;
5367                 case 0x7: /* std, store double word */
5368                     if (rd & 1)
5369                         goto illegal_insn;
5370                     else {
5371                         TCGv_i64 t64;
5372                         TCGv lo;
5373 
5374                         gen_address_mask(dc, cpu_addr);
5375                         lo = gen_load_gpr(dc, rd + 1);
5376                         t64 = tcg_temp_new_i64();
5377                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5378                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5379                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5380                     }
5381                     break;
5382 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5383                 case 0x14: /* sta, V9 stwa, store word alternate */
5384                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5385                     break;
5386                 case 0x15: /* stba, store byte alternate */
5387                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5388                     break;
5389                 case 0x16: /* stha, store halfword alternate */
5390                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5391                     break;
5392                 case 0x17: /* stda, store double word alternate */
5393                     if (rd & 1) {
5394                         goto illegal_insn;
5395                     }
5396                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5397                     break;
5398 #endif
5399 #ifdef TARGET_SPARC64
5400                 case 0x0e: /* V9 stx */
5401                     gen_address_mask(dc, cpu_addr);
5402                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5403                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5404                     break;
5405                 case 0x1e: /* V9 stxa */
5406                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5407                     break;
5408 #endif
5409                 default:
5410                     goto illegal_insn;
5411                 }
5412             } else if (xop > 0x23 && xop < 0x28) {
5413                 if (gen_trap_ifnofpu(dc)) {
5414                     goto jmp_insn;
5415                 }
5416                 switch (xop) {
5417                 case 0x24: /* stf, store fpreg */
5418                     gen_address_mask(dc, cpu_addr);
5419                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5420                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5421                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5422                     break;
5423                 case 0x25: /* stfsr, V9 stxfsr */
5424                     {
5425 #ifdef TARGET_SPARC64
5426                         gen_address_mask(dc, cpu_addr);
5427                         if (rd == 1) {
5428                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5429                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5430                             break;
5431                         }
5432 #endif
5433                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5434                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5435                     }
5436                     break;
5437                 case 0x26:
5438 #ifdef TARGET_SPARC64
5439                     /* V9 stqf, store quad fpreg */
5440                     CHECK_FPU_FEATURE(dc, FLOAT128);
5441                     gen_address_mask(dc, cpu_addr);
5442                     /* ??? While stqf only requires 4-byte alignment, it is
5443                        legal for the cpu to signal the unaligned exception.
5444                        The OS trap handler is then required to fix it up.
5445                        For qemu, this avoids having to probe the second page
5446                        before performing the first write.  */
5447                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5448                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5449                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5450                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5451                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5452                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5453                                         dc->mem_idx, MO_TEUQ);
5454                     break;
5455 #else /* !TARGET_SPARC64 */
5456                     /* stdfq, store floating point queue */
5457 #if defined(CONFIG_USER_ONLY)
5458                     goto illegal_insn;
5459 #else
5460                     if (!supervisor(dc))
5461                         goto priv_insn;
5462                     if (gen_trap_ifnofpu(dc)) {
5463                         goto jmp_insn;
5464                     }
5465                     goto nfq_insn;
5466 #endif
5467 #endif
5468                 case 0x27: /* stdf, store double fpreg */
5469                     gen_address_mask(dc, cpu_addr);
5470                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5471                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5472                                         MO_TEUQ | MO_ALIGN_4);
5473                     break;
5474                 default:
5475                     goto illegal_insn;
5476                 }
5477             } else if (xop > 0x33 && xop < 0x3f) {
5478                 switch (xop) {
5479 #ifdef TARGET_SPARC64
5480                 case 0x34: /* V9 stfa */
5481                     if (gen_trap_ifnofpu(dc)) {
5482                         goto jmp_insn;
5483                     }
5484                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5485                     break;
5486                 case 0x36: /* V9 stqfa */
5487                     {
5488                         CHECK_FPU_FEATURE(dc, FLOAT128);
5489                         if (gen_trap_ifnofpu(dc)) {
5490                             goto jmp_insn;
5491                         }
5492                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5493                     }
5494                     break;
5495                 case 0x37: /* V9 stdfa */
5496                     if (gen_trap_ifnofpu(dc)) {
5497                         goto jmp_insn;
5498                     }
5499                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5500                     break;
5501                 case 0x3e: /* V9 casxa */
5502                     rs2 = GET_FIELD(insn, 27, 31);
5503                     cpu_src2 = gen_load_gpr(dc, rs2);
5504                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5505                     break;
5506 #else
5507                 case 0x34: /* stc */
5508                 case 0x35: /* stcsr */
5509                 case 0x36: /* stdcq */
5510                 case 0x37: /* stdc */
5511                     goto ncp_insn;
5512 #endif
5513 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5514                 case 0x3c: /* V9 or LEON3 casa */
5515 #ifndef TARGET_SPARC64
5516                     CHECK_IU_FEATURE(dc, CASA);
5517 #endif
5518                     rs2 = GET_FIELD(insn, 27, 31);
5519                     cpu_src2 = gen_load_gpr(dc, rs2);
5520                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5521                     break;
5522 #endif
5523                 default:
5524                     goto illegal_insn;
5525                 }
5526             } else {
5527                 goto illegal_insn;
5528             }
5529         }
5530         break;
5531     }
5532     advance_pc(dc);
5533  jmp_insn:
5534     return;
5535  illegal_insn:
5536     gen_exception(dc, TT_ILL_INSN);
5537     return;
5538 #if !defined(CONFIG_USER_ONLY)
5539  priv_insn:
5540     gen_exception(dc, TT_PRIV_INSN);
5541     return;
5542 #endif
5543  nfpu_insn:
5544     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5545     return;
5546 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5547  nfq_insn:
5548     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5549     return;
5550 #endif
5551 #ifndef TARGET_SPARC64
5552  ncp_insn:
5553     gen_exception(dc, TT_NCP_INSN);
5554     return;
5555 #endif
5556 }
5557 
5558 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5559 {
5560     DisasContext *dc = container_of(dcbase, DisasContext, base);
5561     CPUSPARCState *env = cpu_env(cs);
5562     int bound;
5563 
5564     dc->pc = dc->base.pc_first;
5565     dc->npc = (target_ulong)dc->base.tb->cs_base;
5566     dc->cc_op = CC_OP_DYNAMIC;
5567     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5568     dc->def = &env->def;
5569     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5570     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5571 #ifndef CONFIG_USER_ONLY
5572     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5573 #endif
5574 #ifdef TARGET_SPARC64
5575     dc->fprs_dirty = 0;
5576     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5577 #ifndef CONFIG_USER_ONLY
5578     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5579 #endif
5580 #endif
5581     /*
5582      * if we reach a page boundary, we stop generation so that the
5583      * PC of a TT_TFAULT exception is always in the right page
5584      */
5585     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5586     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5587 }
5588 
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* No per-TB setup work is needed for SPARC. */
}
5592 
5593 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5594 {
5595     DisasContext *dc = container_of(dcbase, DisasContext, base);
5596     target_ulong npc = dc->npc;
5597 
5598     if (npc & 3) {
5599         switch (npc) {
5600         case JUMP_PC:
5601             assert(dc->jump_pc[1] == dc->pc + 4);
5602             npc = dc->jump_pc[0] | JUMP_PC;
5603             break;
5604         case DYNAMIC_PC:
5605         case DYNAMIC_PC_LOOKUP:
5606             npc = DYNAMIC_PC;
5607             break;
5608         default:
5609             g_assert_not_reached();
5610         }
5611     }
5612     tcg_gen_insn_start(dc->pc, npc);
5613 }
5614 
5615 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5616 {
5617     DisasContext *dc = container_of(dcbase, DisasContext, base);
5618     CPUSPARCState *env = cpu_env(cs);
5619     unsigned int insn;
5620 
5621     insn = translator_ldl(env, &dc->base, dc->pc);
5622     dc->base.pc_next += 4;
5623 
5624     if (!decode(dc, insn)) {
5625         disas_sparc_legacy(dc, insn);
5626     }
5627 
5628     if (dc->base.is_jmp == DISAS_NORETURN) {
5629         return;
5630     }
5631     if (dc->pc != dc->base.pc_next) {
5632         dc->base.is_jmp = DISAS_TOO_MANY;
5633     }
5634 }
5635 
/*
 * Emit the end-of-TB code: chain, lookup or exit depending on whether
 * pc/npc are static addresses or dynamic-PC tags, then materialize any
 * delayed exceptions queued on dc->delay_excp_list.
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of pc/npc is dynamic.  A TB lookup is allowed
         * unless either holds the plain DYNAMIC_PC tag, which forces
         * a full exit to the main loop.
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Static pc: store it; cpu_pc is not yet up to date. */
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* Resolve the pending two-way branch into cpu_pc/cpu_npc. */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Static npc: store it. */
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /*
     * Emit the out-of-line exception raisers recorded during translation
     * and free the list nodes.  A misaligned e->npc means npc is dynamic
     * and already up to date, so only store it when it is a real address.
     */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5715 
/* Write the guest symbol name and the TB's disassembly to the log file. */
static void sparc_tr_disas_log(const DisasContextBase *dcbase,
                               CPUState *cpu, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
}
5722 
/* Hooks wiring the SPARC front end into the generic translator loop. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5731 
5732 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5733                            target_ulong pc, void *host_pc)
5734 {
5735     DisasContext dc = {};
5736 
5737     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5738 }
5739 
5740 void sparc_tcg_init(void)
5741 {
5742     static const char gregnames[32][4] = {
5743         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5744         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5745         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5746         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5747     };
5748     static const char fregnames[32][4] = {
5749         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5750         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5751         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5752         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5753     };
5754 
5755     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5756 #ifdef TARGET_SPARC64
5757         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5758         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5759 #else
5760         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5761 #endif
5762         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5763         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5764     };
5765 
5766     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5767 #ifdef TARGET_SPARC64
5768         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5769         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5770         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5771         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5772           "hstick_cmpr" },
5773         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5774         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5775         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5776         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5777         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5778 #endif
5779         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5780         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5781         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5782         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5783         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5784         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5785         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5786         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5787 #ifndef CONFIG_USER_ONLY
5788         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5789 #endif
5790     };
5791 
5792     unsigned int i;
5793 
5794     cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
5795                                          offsetof(CPUSPARCState, regwptr),
5796                                          "regwptr");
5797 
5798     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5799         *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
5800     }
5801 
5802     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5803         *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
5804     }
5805 
5806     cpu_regs[0] = NULL;
5807     for (i = 1; i < 8; ++i) {
5808         cpu_regs[i] = tcg_global_mem_new(tcg_env,
5809                                          offsetof(CPUSPARCState, gregs[i]),
5810                                          gregnames[i]);
5811     }
5812 
5813     for (i = 8; i < 32; ++i) {
5814         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5815                                          (i - 8) * sizeof(target_ulong),
5816                                          gregnames[i]);
5817     }
5818 
5819     for (i = 0; i < TARGET_DPREGS; i++) {
5820         cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
5821                                             offsetof(CPUSPARCState, fpr[i]),
5822                                             fregnames[i]);
5823     }
5824 }
5825 
5826 void sparc_restore_state_to_opc(CPUState *cs,
5827                                 const TranslationBlock *tb,
5828                                 const uint64_t *data)
5829 {
5830     SPARCCPU *cpu = SPARC_CPU(cs);
5831     CPUSPARCState *env = &cpu->env;
5832     target_ulong pc = data[0];
5833     target_ulong npc = data[1];
5834 
5835     env->pc = pc;
5836     if (npc == DYNAMIC_PC) {
5837         /* dynamic NPC: already stored */
5838     } else if (npc & JUMP_PC) {
5839         /* jump PC: use 'cond' and the jump targets of the translation */
5840         if (env->cond) {
5841             env->npc = npc & ~3;
5842         } else {
5843             env->npc = pc + 4;
5844         }
5845     } else {
5846         env->npc = npc;
5847     }
5848 }
5849