xref: /openbmc/qemu/target/sparc/translate.c (revision 6d2a0768426554ae5bcaca2445d0c9c53335809a)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
48 /* global register indexes */
49 static TCGv_ptr cpu_regwptr;
50 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
51 static TCGv_i32 cpu_cc_op;
52 static TCGv_i32 cpu_psr;
53 static TCGv cpu_fsr, cpu_pc, cpu_npc;
54 static TCGv cpu_regs[32];
55 static TCGv cpu_y;
56 #ifndef CONFIG_USER_ONLY
57 static TCGv cpu_tbr;
58 #endif
59 static TCGv cpu_cond;
60 #ifdef TARGET_SPARC64
61 static TCGv_i32 cpu_xcc, cpu_fprs;
62 static TCGv cpu_gsr;
63 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
64 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
65 #else
66 static TCGv cpu_wim;
67 #endif
68 /* Floating point registers */
69 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
71 typedef struct DisasDelayException {
72     struct DisasDelayException *next;
73     TCGLabel *lab;
74     TCGv_i32 excp;
75     /* Saved state at parent insn. */
76     target_ulong pc;
77     target_ulong npc;
78 } DisasDelayException;
79 
80 typedef struct DisasContext {
81     DisasContextBase base;
82     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
83     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
84     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
85     int mem_idx;
86     bool fpu_enabled;
87     bool address_mask_32bit;
88 #ifndef CONFIG_USER_ONLY
89     bool supervisor;
90 #ifdef TARGET_SPARC64
91     bool hypervisor;
92 #endif
93 #endif
94 
95     uint32_t cc_op;  /* current CC operation */
96     sparc_def_t *def;
97 #ifdef TARGET_SPARC64
98     int fprs_dirty;
99     int asi;
100 #endif
101     DisasDelayException *delay_excp_list;
102 } DisasContext;
103 
104 typedef struct {
105     TCGCond cond;
106     bool is_bool;
107     TCGv c1, c2;
108 } DisasCompare;
109 
110 // This function uses non-native bit order
111 #define GET_FIELD(X, FROM, TO)                                  \
112     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
113 
114 // This function uses the order in the manuals, i.e. bit 0 is 2^0
115 #define GET_FIELD_SP(X, FROM, TO)               \
116     GET_FIELD(X, 31 - (TO), 31 - (FROM))
117 
118 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
119 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
120 
121 #ifdef TARGET_SPARC64
122 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
123 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
124 #else
125 #define DFPREG(r) (r & 0x1e)
126 #define QFPREG(r) (r & 0x1c)
127 #endif
128 
129 #define UA2005_HTRAP_MASK 0xff
130 #define V8_TRAP_MASK 0x7f
131 
132 static int sign_extend(int x, int len)
133 {
134     len = 32 - len;
135     return (x << len) >> len;
136 }
137 
138 #define IS_IMM (insn & (1<<13))
139 
/* Mark the half of the FP register file containing RD as dirty in FPRS
   (sparc64 only): bit 1 (DL) for %f0-%f31, bit 2 (DU) for %f32-%f63.  */
static void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
152 
153 /* floating point registers moves */
/* Load single-precision register %fSRC into a fresh 32-bit temp.
   Odd-numbered registers live in the low half of the backing i64,
   even-numbered ones in the high half.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}
164 
/* Store V into single-precision register %fDST, preserving the other
   half of the backing i64, and mark FPRS dirty.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
174 
/* Return a scratch 32-bit temp to receive a single-precision result.  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
179 
/* Return the i64 global backing double-precision register %dSRC.  */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
185 
/* Store V into double-precision register %dDST and mark FPRS dirty.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
192 
/* Return the i64 destination for %dDST; the caller is responsible for
   calling gen_update_fprs_dirty after writing it.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
/* Copy quad register pair %qSRC into the env scratch area qt0,
   upper i64 first.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
205 
/* Copy quad register pair %qSRC into the env scratch area qt1,
   upper i64 first.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
213 
/* Copy the env scratch area qt0 back into quad register pair %qDST.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
221 
/* Store the pair V1 (upper i64) / V2 (lower i64) into quad register
   %qDST and mark FPRS dirty.  */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
231 
232 #ifdef TARGET_SPARC64
/* Return the upper i64 half of quad register %qSRC.  */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}
238 
/* Return the lower i64 half of quad register %qSRC.  */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}
244 
/* Copy quad register %qRS to %qRD (both i64 halves) and mark FPRS dirty.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
254 #endif
255 
256 /* moves */
257 #ifdef CONFIG_USER_ONLY
258 #define supervisor(dc) 0
259 #ifdef TARGET_SPARC64
260 #define hypervisor(dc) 0
261 #endif
262 #else
263 #ifdef TARGET_SPARC64
264 #define hypervisor(dc) (dc->hypervisor)
265 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
266 #else
267 #define supervisor(dc) (dc->supervisor)
268 #endif
269 #endif
270 
271 #if !defined(TARGET_SPARC64)
272 # define AM_CHECK(dc)  false
273 #elif defined(TARGET_ABI32)
274 # define AM_CHECK(dc)  true
275 #elif defined(CONFIG_USER_ONLY)
276 # define AM_CHECK(dc)  false
277 #else
278 # define AM_CHECK(dc)  ((dc)->address_mask_32bit)
279 #endif
280 
/* Truncate ADDR to 32 bits in place when 32-bit address masking
   (PSTATE.AM / 32-bit ABI) applies.  */
static void gen_address_mask(DisasContext *dc, TCGv addr)
{
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
}
287 
288 static target_ulong address_mask_i(DisasContext *dc, target_ulong addr)
289 {
290     return AM_CHECK(dc) ? (uint32_t)addr : addr;
291 }
292 
293 static TCGv gen_load_gpr(DisasContext *dc, int reg)
294 {
295     if (reg > 0) {
296         assert(reg < 32);
297         return cpu_regs[reg];
298     } else {
299         TCGv t = tcg_temp_new();
300         tcg_gen_movi_tl(t, 0);
301         return t;
302     }
303 }
304 
/* Copy V into global register REG; writes to %g0 are silently dropped.  */
static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        assert(reg < 32);
        tcg_gen_mov_tl(cpu_regs[reg], v);
    }
}
312 
313 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
314 {
315     if (reg > 0) {
316         assert(reg < 32);
317         return cpu_regs[reg];
318     } else {
319         return tcg_temp_new();
320     }
321 }
322 
/* True if both PC and NPC can be reached with a direct TB link.  */
static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
{
    return translator_use_goto_tb(&s->base, pc) &&
           translator_use_goto_tb(&s->base, npc);
}
328 
/* End the TB jumping to PC/NPC, using a chained goto_tb when both
   targets are linkable, else a lookup-and-goto-ptr exit.  */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
345 
// Extract the PSR N (negative) flag into REG as 0/1.  XXX suboptimal
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
352 
// Extract the PSR Z (zero) flag into REG as 0/1.
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
358 
// Extract the PSR V (overflow) flag into REG as 0/1.
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
364 
// Extract the PSR C (carry) flag into REG as 0/1.
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
370 
/* DST = SRC1 + SRC2, recording operands and result in the cc_* globals
   for later lazy evaluation under CC_OP_ADD.  */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
378 
/* Recover the 32-bit carry left behind by a previous CC_OP_ADD-style
   operation, as a 0/1 i32 temp.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
399 
/* Recover the 32-bit borrow left behind by a previous CC_OP_SUB-style
   operation, as a 0/1 i32 temp.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
420 
/* Generate DST = SRC1 + SRC2 + icc.C (ADDX/ADDXcc), choosing the
   cheapest way to recover the carry from the current lazy cc state.
   When UPDATE_CC, also record operands/result for CC_OP_ADDX.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
485 
/* DST = SRC1 - SRC2, recording operands and result in the cc_* globals
   for later lazy evaluation under CC_OP_SUB.  */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
493 
/* Generate DST = SRC1 - SRC2 - icc.C (SUBX/SUBXcc), choosing the
   cheapest way to recover the borrow from the current lazy cc state.
   When UPDATE_CC, also record operands/result for CC_OP_SUBX.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
558 
/* Generate one MULScc multiply step (SPARC V8): conditionally zero the
   multiplicand on Y's LSB, shift Y and the partial product, and add.
   Operands/result are left in the cc_* globals for CC_OP_ADD.  */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
597 
/* 32x32 -> 64-bit multiply (UMUL/SMUL): low 32 bits of the product go
   to DST, high 32 bits to %y.  SIGN_EXT selects signed operands.  */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
622 
/* UMUL: unsigned 32x32 -> 64-bit multiply, high word in %y.  */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
628 
/* SMUL: signed 32x32 -> 64-bit multiply, high word in %y.  */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
634 
// ba (branch always): constant 1
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
640 
// be (branch on equal): Z
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
646 
// ble (branch on less or equal): Z | (N ^ V)
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}
657 
// bl (branch on less): N ^ V
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}
666 
// bleu (branch on less or equal, unsigned): C | Z
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}
675 
// bcs (branch on carry set): C
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
681 
// bvs (branch on overflow set): V
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
687 
// bn (branch never): constant 0
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
693 
// bneg (branch on negative): N
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
699 
// bne (branch on not equal): !Z
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
706 
// bg (branch on greater): !(Z | (N ^ V))
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
713 
// bge (branch on greater or equal): !(N ^ V)
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
720 
// bgu (branch on greater, unsigned): !(C | Z)
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
727 
// bcc (branch on carry clear): !C
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
734 
// bpos (branch on positive): !N
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
741 
// bvc (branch on overflow clear): !V
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
748 
749 /*
750   FPSR bit field FCC1 | FCC0:
751    0 =
752    1 <
753    2 >
754    3 unordered
755 */
/* Extract the FCC0 bit of the fcc field at FCC_OFFSET into REG (0/1).  */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
762 
/* Extract the FCC1 bit of the fcc field at FCC_OFFSET into REG (0/1).  */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
768 
// fbne: fcc != 0, i.e. FCC0 | FCC1
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}
777 
// fblg: fcc == 1 or 2 (less or greater), i.e. FCC0 ^ FCC1
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}
786 
// fbul: fcc == 1 or 3 (unordered or less), i.e. FCC0
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
792 
// fbl: fcc == 1 (less), i.e. FCC0 & !FCC1
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}
801 
// fbug: fcc == 2 or 3 (unordered or greater), i.e. FCC1
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
807 
// fbg: fcc == 2 (greater), i.e. !FCC0 & FCC1
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}
816 
// fbu: fcc == 3 (unordered), i.e. FCC0 & FCC1
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}
825 
// fbe: fcc == 0 (equal), i.e. !(FCC0 | FCC1)
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
835 
// fbue: fcc == 0 or 3 (unordered or equal), i.e. !(FCC0 ^ FCC1)
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
845 
// fbge: fcc == 0 or 2 (greater or equal), i.e. !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
852 
// fbuge: fcc != 1 (not less), i.e. !(FCC0 & !FCC1)
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
862 
// fble: fcc == 0 or 1 (less or equal), i.e. !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
869 
// fbule: fcc != 2 (not greater), i.e. !(!FCC0 & FCC1)
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
879 
// fbo: fcc != 3 (ordered), i.e. !(FCC0 & FCC1)
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
889 
/* End the TB with a two-way conditional exit: continue at PC1 when
   R_COND is nonzero, else at PC2; each exit may chain TBs.  */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
902 
/* Resolve a pending JUMP_PC at runtime: select cpu_npc between
   jump_pc[0] (condition true) and jump_pc[1] based on cpu_cond.  */
static void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
911 
/* Call this function before using the condition register as it may
   have been set for a jump: a pending JUMP_PC is materialized into
   cpu_npc here, after which cpu_cond may be clobbered freely.  */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
921 
/* Flush dc->npc to cpu_npc.  Small values (npc & 3 nonzero) encode the
   dynamic states DYNAMIC_PC / JUMP_PC / DYNAMIC_PC_LOOKUP; a pending
   JUMP_PC must be materialized first.  */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
940 
/* Force the lazily-evaluated condition codes into the PSR global.  */
static void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(tcg_env);
    }
}
948 
/* Flush pc and npc to the CPU state, e.g. before a helper that may
   raise an exception.  */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
954 
/* Raise exception WHICH at the current insn and terminate the TB.  */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
961 
962 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
963 {
964     DisasDelayException *e = g_new0(DisasDelayException, 1);
965 
966     e->next = dc->delay_excp_list;
967     dc->delay_excp_list = e;
968 
969     e->lab = gen_new_label();
970     e->excp = excp;
971     e->pc = dc->pc;
972     /* Caller must have used flush_cond before branch. */
973     assert(e->npc != JUMP_PC);
974     e->npc = dc->npc;
975 
976     return e->lab;
977 }
978 
/* As delay_exceptionv, for a compile-time-constant exception number.  */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
983 
/* Branch to a delayed unaligned-access trap if ADDR has any of the
   MASK bits set.  */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}
995 
/* Advance pc to npc (delay-slot semantics), materializing a pending
   JUMP_PC or propagating the dynamic-pc state as needed.  */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
    }
}
1017 
/* Step to the next insn: pc <- npc, npc <- npc + 4.  */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1023 
/*
 * Set up CMP to test integer condition code COND (the 4-bit cond field
 * of Bicc/Tcc/MOVcc).  On sparc64, XCC selects the 64-bit %xcc codes
 * instead of %icc.  Depending on dc->cc_op we can often compare the
 * saved cc_dst/cc_src operands directly instead of computing flags.
 */
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    /* Condition applied to (cc_src, cc_src2) when the flags were set
       by a subcc; -1 entries need the real flag bits (see below).  */
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    /* Condition applied to (cc_dst, 0) when the flags were set by a
       logic op: C and V are known zero, so every test simplifies.  */
    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        /* Compare cc_dst against zero.  */
        cmp->is_bool = false;
        cmp->c2 = tcg_constant_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            /* For %icc, only the low 32 bits of the result matter.  */
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            /* N depends only on the sign of the result in cc_dst.  */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            /* V cannot be recovered from the operands alone.  */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        /* Flags are in some other (or unknown) form: materialize them.  */
        gen_helper_compute_psr(tcg_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_constant_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
1191 
/*
 * Set up CMP to test float condition code COND (the 4-bit cond field
 * of FBfcc/FMOVcc) against fcc field number CC of the FSR.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Bit offset of fcc[cc] relative to fcc0 (bit 10 of the FSR);
       fcc1..fcc3 live at FSR bits 32/34/36, hence the "- 10".  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Evaluate the selected fcc field into the boolean r_dst.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1270 
/* Map a SPARC register-condition code (rcond, as used by BPr/MOVr/FMOVr)
   to a TCG condition.  NOTE: the table stores the INVERSE of each test;
   callers either use the inverted sense directly or re-invert it (as
   gen_compare_reg below does) to obtain the actual condition.  */
static const TCGCond gen_tcg_cond_reg[8] = {
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1282 
1283 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1284 {
1285     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1286     cmp->is_bool = false;
1287     cmp->c1 = r_src;
1288     cmp->c2 = tcg_constant_tl(0);
1289 }
1290 
1291 #ifdef TARGET_SPARC64
1292 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1293 {
1294     switch (fccno) {
1295     case 0:
1296         gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
1297         break;
1298     case 1:
1299         gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1300         break;
1301     case 2:
1302         gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1303         break;
1304     case 3:
1305         gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1306         break;
1307     }
1308 }
1309 
1310 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1311 {
1312     switch (fccno) {
1313     case 0:
1314         gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
1315         break;
1316     case 1:
1317         gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1318         break;
1319     case 2:
1320         gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1321         break;
1322     case 3:
1323         gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1324         break;
1325     }
1326 }
1327 
1328 static void gen_op_fcmpq(int fccno)
1329 {
1330     switch (fccno) {
1331     case 0:
1332         gen_helper_fcmpq(cpu_fsr, tcg_env);
1333         break;
1334     case 1:
1335         gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
1336         break;
1337     case 2:
1338         gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
1339         break;
1340     case 3:
1341         gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
1342         break;
1343     }
1344 }
1345 
1346 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1347 {
1348     switch (fccno) {
1349     case 0:
1350         gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
1351         break;
1352     case 1:
1353         gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1354         break;
1355     case 2:
1356         gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1357         break;
1358     case 3:
1359         gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1360         break;
1361     }
1362 }
1363 
1364 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1365 {
1366     switch (fccno) {
1367     case 0:
1368         gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
1369         break;
1370     case 1:
1371         gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1372         break;
1373     case 2:
1374         gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1375         break;
1376     case 3:
1377         gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1378         break;
1379     }
1380 }
1381 
1382 static void gen_op_fcmpeq(int fccno)
1383 {
1384     switch (fccno) {
1385     case 0:
1386         gen_helper_fcmpeq(cpu_fsr, tcg_env);
1387         break;
1388     case 1:
1389         gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
1390         break;
1391     case 2:
1392         gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
1393         break;
1394     case 3:
1395         gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
1396         break;
1397     }
1398 }
1399 
1400 #else
1401 
/* Pre-V9 has a single fcc field, so FCCNO is ignored here and below. */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1406 
/* Double-precision compare into the sole pre-V9 fcc; FCCNO ignored. */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1411 
/* Quad-precision compare (operands in QT0/QT1); FCCNO ignored pre-V9. */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}
1416 
/* Single-precision signaling compare; FCCNO ignored pre-V9. */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1421 
/* Double-precision signaling compare; FCCNO ignored pre-V9. */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1426 
/* Quad-precision signaling compare (QT0/QT1); FCCNO ignored pre-V9. */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1431 #endif
1432 
/* Raise a floating-point exception: replace the FTT field of the FSR
   with FSR_FLAGS and generate a TT_FP_EXCP trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1439 
1440 static int gen_trap_ifnofpu(DisasContext *dc)
1441 {
1442 #if !defined(CONFIG_USER_ONLY)
1443     if (!dc->fpu_enabled) {
1444         gen_exception(dc, TT_NFPU_INSN);
1445         return 1;
1446     }
1447 #endif
1448     return 0;
1449 }
1450 
/* Clear the current IEEE exception bits (CEXC) and the FTT field of
   the FSR before executing a new FP operation.  */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1455 
1456 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1457                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1458 {
1459     TCGv_i32 dst, src;
1460 
1461     src = gen_load_fpr_F(dc, rs);
1462     dst = gen_dest_fpr_F(dc);
1463 
1464     gen(dst, tcg_env, src);
1465     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1466 
1467     gen_store_fpr_F(dc, rd, dst);
1468 }
1469 
1470 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1471                           void (*gen)(TCGv_i32, TCGv_i32))
1472 {
1473     TCGv_i32 dst, src;
1474 
1475     src = gen_load_fpr_F(dc, rs);
1476     dst = gen_dest_fpr_F(dc);
1477 
1478     gen(dst, src);
1479 
1480     gen_store_fpr_F(dc, rd, dst);
1481 }
1482 
1483 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1484                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1485 {
1486     TCGv_i32 dst, src1, src2;
1487 
1488     src1 = gen_load_fpr_F(dc, rs1);
1489     src2 = gen_load_fpr_F(dc, rs2);
1490     dst = gen_dest_fpr_F(dc);
1491 
1492     gen(dst, tcg_env, src1, src2);
1493     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1494 
1495     gen_store_fpr_F(dc, rd, dst);
1496 }
1497 
1498 #ifdef TARGET_SPARC64
1499 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1500                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1501 {
1502     TCGv_i32 dst, src1, src2;
1503 
1504     src1 = gen_load_fpr_F(dc, rs1);
1505     src2 = gen_load_fpr_F(dc, rs2);
1506     dst = gen_dest_fpr_F(dc);
1507 
1508     gen(dst, src1, src2);
1509 
1510     gen_store_fpr_F(dc, rd, dst);
1511 }
1512 #endif
1513 
1514 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1515                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1516 {
1517     TCGv_i64 dst, src;
1518 
1519     src = gen_load_fpr_D(dc, rs);
1520     dst = gen_dest_fpr_D(dc, rd);
1521 
1522     gen(dst, tcg_env, src);
1523     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1524 
1525     gen_store_fpr_D(dc, rd, dst);
1526 }
1527 
1528 #ifdef TARGET_SPARC64
1529 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1530                           void (*gen)(TCGv_i64, TCGv_i64))
1531 {
1532     TCGv_i64 dst, src;
1533 
1534     src = gen_load_fpr_D(dc, rs);
1535     dst = gen_dest_fpr_D(dc, rd);
1536 
1537     gen(dst, src);
1538 
1539     gen_store_fpr_D(dc, rd, dst);
1540 }
1541 #endif
1542 
1543 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1544                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1545 {
1546     TCGv_i64 dst, src1, src2;
1547 
1548     src1 = gen_load_fpr_D(dc, rs1);
1549     src2 = gen_load_fpr_D(dc, rs2);
1550     dst = gen_dest_fpr_D(dc, rd);
1551 
1552     gen(dst, tcg_env, src1, src2);
1553     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1554 
1555     gen_store_fpr_D(dc, rd, dst);
1556 }
1557 
1558 #ifdef TARGET_SPARC64
1559 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1560                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1561 {
1562     TCGv_i64 dst, src1, src2;
1563 
1564     src1 = gen_load_fpr_D(dc, rs1);
1565     src2 = gen_load_fpr_D(dc, rs2);
1566     dst = gen_dest_fpr_D(dc, rd);
1567 
1568     gen(dst, src1, src2);
1569 
1570     gen_store_fpr_D(dc, rd, dst);
1571 }
1572 
1573 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1574                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1575 {
1576     TCGv_i64 dst, src1, src2;
1577 
1578     src1 = gen_load_fpr_D(dc, rs1);
1579     src2 = gen_load_fpr_D(dc, rs2);
1580     dst = gen_dest_fpr_D(dc, rd);
1581 
1582     gen(dst, cpu_gsr, src1, src2);
1583 
1584     gen_store_fpr_D(dc, rd, dst);
1585 }
1586 
1587 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1588                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1589 {
1590     TCGv_i64 dst, src0, src1, src2;
1591 
1592     src1 = gen_load_fpr_D(dc, rs1);
1593     src2 = gen_load_fpr_D(dc, rs2);
1594     src0 = gen_load_fpr_D(dc, rd);
1595     dst = gen_dest_fpr_D(dc, rd);
1596 
1597     gen(dst, src0, src1, src2);
1598 
1599     gen_store_fpr_D(dc, rd, dst);
1600 }
1601 #endif
1602 
/* q[rd] = GEN(q[rs]) on quad-precision values; operands are passed via
   the global QT0/QT1 slots (source in QT1, result in QT0), with IEEE
   exceptions folded into the FSR.  */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1614 
1615 #ifdef TARGET_SPARC64
/* q[rd] = GEN(q[rs]) quad-precision via QT1/QT0; no-exception variant.  */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1626 #endif
1627 
/* q[rd] = GEN(q[rs1], q[rs2]) on quad-precision values; sources are
   passed in QT0/QT1 and the result returned in QT0, with IEEE
   exceptions folded into the FSR.  */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1640 
1641 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1642                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1643 {
1644     TCGv_i64 dst;
1645     TCGv_i32 src1, src2;
1646 
1647     src1 = gen_load_fpr_F(dc, rs1);
1648     src2 = gen_load_fpr_F(dc, rs2);
1649     dst = gen_dest_fpr_D(dc, rd);
1650 
1651     gen(dst, tcg_env, src1, src2);
1652     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1653 
1654     gen_store_fpr_D(dc, rd, dst);
1655 }
1656 
1657 static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1658                         void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1659 {
1660     TCGv_i64 src1, src2;
1661 
1662     src1 = gen_load_fpr_D(dc, rs1);
1663     src2 = gen_load_fpr_D(dc, rs2);
1664 
1665     gen(tcg_env, src1, src2);
1666     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1667 
1668     gen_op_store_QT0_fpr(QFPREG(rd));
1669     gen_update_fprs_dirty(dc, QFPREG(rd));
1670 }
1671 
1672 #ifdef TARGET_SPARC64
1673 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1674                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1675 {
1676     TCGv_i64 dst;
1677     TCGv_i32 src;
1678 
1679     src = gen_load_fpr_F(dc, rs);
1680     dst = gen_dest_fpr_D(dc, rd);
1681 
1682     gen(dst, tcg_env, src);
1683     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1684 
1685     gen_store_fpr_D(dc, rd, dst);
1686 }
1687 #endif
1688 
1689 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1690                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1691 {
1692     TCGv_i64 dst;
1693     TCGv_i32 src;
1694 
1695     src = gen_load_fpr_F(dc, rs);
1696     dst = gen_dest_fpr_D(dc, rd);
1697 
1698     gen(dst, tcg_env, src);
1699 
1700     gen_store_fpr_D(dc, rd, dst);
1701 }
1702 
1703 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1704                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1705 {
1706     TCGv_i32 dst;
1707     TCGv_i64 src;
1708 
1709     src = gen_load_fpr_D(dc, rs);
1710     dst = gen_dest_fpr_F(dc);
1711 
1712     gen(dst, tcg_env, src);
1713     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1714 
1715     gen_store_fpr_F(dc, rd, dst);
1716 }
1717 
1718 static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1719                        void (*gen)(TCGv_i32, TCGv_ptr))
1720 {
1721     TCGv_i32 dst;
1722 
1723     gen_op_load_fpr_QT1(QFPREG(rs));
1724     dst = gen_dest_fpr_F(dc);
1725 
1726     gen(dst, tcg_env);
1727     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1728 
1729     gen_store_fpr_F(dc, rd, dst);
1730 }
1731 
1732 static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1733                        void (*gen)(TCGv_i64, TCGv_ptr))
1734 {
1735     TCGv_i64 dst;
1736 
1737     gen_op_load_fpr_QT1(QFPREG(rs));
1738     dst = gen_dest_fpr_D(dc, rd);
1739 
1740     gen(dst, tcg_env);
1741     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1742 
1743     gen_store_fpr_D(dc, rd, dst);
1744 }
1745 
1746 static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1747                           void (*gen)(TCGv_ptr, TCGv_i32))
1748 {
1749     TCGv_i32 src;
1750 
1751     src = gen_load_fpr_F(dc, rs);
1752 
1753     gen(tcg_env, src);
1754 
1755     gen_op_store_QT0_fpr(QFPREG(rd));
1756     gen_update_fprs_dirty(dc, QFPREG(rd));
1757 }
1758 
1759 static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1760                           void (*gen)(TCGv_ptr, TCGv_i64))
1761 {
1762     TCGv_i64 src;
1763 
1764     src = gen_load_fpr_D(dc, rs);
1765 
1766     gen(tcg_env, src);
1767 
1768     gen_op_store_QT0_fpr(QFPREG(rd));
1769     gen_update_fprs_dirty(dc, QFPREG(rd));
1770 }
1771 
/* swap/swapa: atomically exchange SRC with memory at ADDR, putting the
   old memory value into DST.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1778 
/* ldstub: atomically load the byte at ADDR into DST and store 0xff.  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_constant_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
}
1785 
1786 /* asi moves */
1787 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI: decides which code path generates the access. */
typedef enum {
    GET_ASI_HELPER,  /* no inline path; go through the ld/st_asi helpers */
    GET_ASI_EXCP,    /* an exception was raised; emit nothing further */
    GET_ASI_DIRECT,  /* plain qemu_ld/st with the selected mem_idx */
    GET_ASI_DTWINX,  /* TWINX/quad-LDD asis (ldda/stda) */
    GET_ASI_BLOCK,   /* BLK_* block-transfer asis */
    GET_ASI_SHORT,   /* FL8/FL16 short floating-point asis */
    GET_ASI_BCOPY,   /* sparc32 ASI_M_BCOPY: 32-byte block copy via sta */
    GET_ASI_BFILL,   /* sparc32 ASI_M_BFILL: block fill via stda */
} ASIType;
1798 
/* A resolved ASI access: how to generate it and with what attributes.  */
typedef struct {
    ASIType type;
    int asi;      /* the ASI number (immediate, or dc->asi for %asi form) */
    int mem_idx;  /* MMU index to use for the access */
    MemOp memop;  /* size/endianness, possibly overridden by the ASI */
} DisasASI;
1805 
/*
 * Decode the ASI of a load/store alternate instruction INSN into a
 * DisasASI: classify it (direct/block/twin/...), pick the MMU index,
 * and adjust MEMOP for size/endianness overrides.  May generate a
 * privilege or illegal-instruction exception (type GET_ASI_EXCP).
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        /* Register form: the effective ASI comes from the %asi register. */
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: choose the MMU index from the address space.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: classify the access type.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2018 
/* Generate a load-alternate (lda and friends) of size MEMOP from ADDR
   into DST, honoring the ASI encoded in INSN.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Anything else goes through the ld_asi helper, which needs an
           up-to-date CPU state (it may fault or trap).  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; narrow to target size. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2053 
/* Generate a store-alternate (sta and friends) of size MEMOP of SRC to
   ADDR, honoring the ASI encoded in INSN.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        /* Everything else goes through the st_asi helper; save state
           first since the helper may fault.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen the source.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2129 
2130 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2131                          TCGv addr, int insn)
2132 {
2133     DisasASI da = get_asi(dc, insn, MO_TEUL);
2134 
2135     switch (da.type) {
2136     case GET_ASI_EXCP:
2137         break;
2138     case GET_ASI_DIRECT:
2139         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2140         break;
2141     default:
2142         /* ??? Should be DAE_invalid_asi.  */
2143         gen_exception(dc, TT_DATA_ACCESS);
2144         break;
2145     }
2146 }
2147 
2148 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2149                         int insn, int rd)
2150 {
2151     DisasASI da = get_asi(dc, insn, MO_TEUL);
2152     TCGv oldv;
2153 
2154     switch (da.type) {
2155     case GET_ASI_EXCP:
2156         return;
2157     case GET_ASI_DIRECT:
2158         oldv = tcg_temp_new();
2159         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2160                                   da.mem_idx, da.memop | MO_ALIGN);
2161         gen_store_gpr(dc, rd, oldv);
2162         break;
2163     default:
2164         /* ??? Should be DAE_invalid_asi.  */
2165         gen_exception(dc, TT_DATA_ACCESS);
2166         break;
2167     }
2168 }
2169 
/*
 * Generate code for LDSTUBA: atomically load the byte at ADDR into
 * DST and set the memory byte to 0xff, through the ASI in INSN.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* ASI decode raised an exception -- nothing to emit.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper-based load+store below is not atomic; with
               other cpus running, punt to the serialized slow path.  */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            /* Write the 0xff marker byte back through the same ASI.  */
            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2205 #endif
2206 
2207 #ifdef TARGET_SPARC64
/*
 * Generate code for an alternate-space FP load (ldfa/lddfa/ldqfa):
 * load SIZE bytes (4, 8 or 16) from ADDR into the FP register file
 * at RD, using the ASI encoded in INSN.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Hold the first dword in a temp so that cpu_fpr[rd / 2]
               is left unmodified if the second load faults.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* 64-byte block load into 8 consecutive double registers.
               The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As above: commit the first dword only after the
                   second helper call has completed.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2316 
/*
 * Generate code for an alternate-space FP store (stfa/stdfa/stqfa):
 * store SIZE bytes (4, 8 or 16) from FP register RD to ADDR, using
 * the ASI encoded in INSN.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* 64-byte block store from 8 consecutive double registers.
               The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2399 
/*
 * Generate code for a v9 LDDA: load through the ASI in INSN into the
 * register pair RD (high word) / RD+1 (low word).  GET_ASI_DTWINX
 * instead loads two full adjacent doublewords (a 16-byte "twin").
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;

    case GET_ASI_DTWINX:
        /* Two adjacent dwords; the first access enforces the 16-byte
           alignment required of twin loads.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2461 
/*
 * Generate code for a v9 STDA: store the register pair RD (high
 * word, passed as HI) / RD+1 (low word) through the ASI in INSN.
 * GET_ASI_DTWINX instead stores two full adjacent doublewords.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DTWINX:
        /* Two adjacent dwords; the first access enforces the 16-byte
           alignment required of twin stores.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2517 
2518 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2519                          int insn, int rd)
2520 {
2521     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2522     TCGv oldv;
2523 
2524     switch (da.type) {
2525     case GET_ASI_EXCP:
2526         return;
2527     case GET_ASI_DIRECT:
2528         oldv = tcg_temp_new();
2529         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2530                                   da.mem_idx, da.memop | MO_ALIGN);
2531         gen_store_gpr(dc, rd, oldv);
2532         break;
2533     default:
2534         /* ??? Should be DAE_invalid_asi.  */
2535         gen_exception(dc, TT_DATA_ACCESS);
2536         break;
2537     }
2538 }
2539 
2540 #elif !defined(CONFIG_USER_ONLY)
/*
 * Generate code for a sparc32 LDDA: load a 64-bit value through the
 * ASI in INSN and split it into the register pair RD (high word) /
 * RD+1 (low word).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Unhandled ASI: go through the slow-path helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit result into the two 32-bit destinations.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2574 
/*
 * Generate code for a sparc32 STDA: store the register pair RD (high
 * word, passed as HI) / RD+1 (low word) as one 64-bit value through
 * the ASI in INSN.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Combine the pair into a single 64-bit value up front.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Unhandled ASI: go through the slow-path helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2620 #endif
2621 
2622 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2623 {
2624     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2625     return gen_load_gpr(dc, rs1);
2626 }
2627 
2628 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2629 {
2630     if (IS_IMM) { /* immediate */
2631         target_long simm = GET_FIELDs(insn, 19, 31);
2632         TCGv t = tcg_temp_new();
2633         tcg_gen_movi_tl(t, simm);
2634         return t;
2635     } else {      /* register */
2636         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2637         return gen_load_gpr(dc, rs2);
2638     }
2639 }
2640 
2641 #ifdef TARGET_SPARC64
/*
 * Generate a conditional move into single-precision FP register RD
 * from RS, according to the comparison in CMP.
 */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds the 0/1 comparison result.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_constant_i32(0);

    /* dst = (c32 != 0 ? s1 : s2), i.e. move only when the condition holds. */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2667 
2668 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2669 {
2670     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2671     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2672                         gen_load_fpr_D(dc, rs),
2673                         gen_load_fpr_D(dc, rd));
2674     gen_store_fpr_D(dc, rd, dst);
2675 }
2676 
2677 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2678 {
2679     int qd = QFPREG(rd);
2680     int qs = QFPREG(rs);
2681 
2682     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2683                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2684     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2685                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2686 
2687     gen_update_fprs_dirty(dc, qd);
2688 }
2689 
2690 #ifndef CONFIG_USER_ONLY
/* Set R_TSPTR to point at env->ts[env->tl & MAXTL_MASK], the trap
   state entry for the current trap level.  */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2712 #endif
2713 
/*
 * Generate code for the VIS EDGE instructions: DST receives the edge
 * mask determined by the low bits of S1 and S2 for element WIDTH
 * (8, 16 or 32 bits); LEFT selects the left- vs right-edge variant.
 * When CC is set, the integer condition codes are also set as for
 * "subcc s1, s2".
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = per-input table lookups as described above.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(lo1, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Align both addresses down to 8 bytes (masked to 32 bits in
       32-bit address-mask mode) before comparing them.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
    tcg_gen_and_tl(lo2, lo2, lo1);
    tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
}
2800 
2801 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2802 {
2803     TCGv tmp = tcg_temp_new();
2804 
2805     tcg_gen_add_tl(tmp, s1, s2);
2806     tcg_gen_andi_tl(dst, tmp, -8);
2807     if (left) {
2808         tcg_gen_neg_tl(tmp, tmp);
2809     }
2810     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2811 }
2812 
/*
 * Generate code for FALIGNDATA: treat S1:S2 as a 128-bit value and
 * extract the 64 bits beginning at the byte offset held in the low
 * 3 bits of GSR, i.e. dst = (s1 << 8*off) | (s2 >> (64 - 8*off)).
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = 8 * (gsr & 7), the bit offset of the alignment point.  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2833 #endif
2834 
2835 /* Include the auto-generated decoder.  */
2836 #include "decode-insns.c.inc"
2837 
/*
 * Define trans_<NAME>() for the decodetree-generated decoder: the
 * insn is accepted only when the availability predicate
 * avail_<AVAIL>() holds for this DisasContext, then translated by
 * FUNC.
 */
#define TRANS(NAME, AVAIL, FUNC, ...) \
    static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
    { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }

/* Availability predicates: ALL matches everywhere; 32 and 64 select
   the sparc32 vs sparc64 build.  */
#define avail_ALL(C)      true
#ifdef TARGET_SPARC64
# define avail_32(C)      false
# define avail_64(C)      true
#else
# define avail_32(C)      true
# define avail_64(C)      false
#endif
2850 
/* Default case for non jump instructions. */
/*
 * Advance pc/npc past the current instruction.  An npc with the low
 * two bits set is not an address but one of the symbolic markers
 * DYNAMIC_PC, JUMP_PC or DYNAMIC_PC_LOOKUP, and requires generated
 * code to compute the successor at run time.
 */
static bool advance_pc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* npc is only known at run time; emit pc <- npc, npc += 4. */
            dc->pc = dc->npc;
            gen_op_next_insn();
            break;
        case JUMP_PC:
            /* we can do a static jump */
            gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
            dc->base.is_jmp = DISAS_NORETURN;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
    return true;
}
2875 
2876 /*
2877  * Major opcodes 00 and 01 -- branches, call, and sethi
2878  */
2879 
2880 static bool advance_jump_uncond_never(DisasContext *dc, bool annul)
2881 {
2882     if (annul) {
2883         dc->pc = dc->npc + 4;
2884         dc->npc = dc->pc + 4;
2885     } else {
2886         dc->pc = dc->npc;
2887         dc->npc = dc->pc + 4;
2888     }
2889     return true;
2890 }
2891 
2892 static bool advance_jump_uncond_always(DisasContext *dc, bool annul,
2893                                        target_ulong dest)
2894 {
2895     if (annul) {
2896         dc->pc = dest;
2897         dc->npc = dest + 4;
2898     } else {
2899         dc->pc = dc->npc;
2900         dc->npc = dest;
2901         tcg_gen_mov_tl(cpu_pc, cpu_npc);
2902     }
2903     return true;
2904 }
2905 
/*
 * Conditional branch to DEST.  With ANNUL set the delay slot executes
 * only when the branch is taken, so both successor paths are emitted
 * here and the TB ends.  Otherwise the delay slot always executes and
 * the two candidate npc values are carried forward (JUMP_PC) for the
 * next instruction to resolve.
 */
static bool advance_jump_cond(DisasContext *dc, DisasCompare *cmp,
                              bool annul, target_ulong dest)
{
    target_ulong npc = dc->npc;

    if (annul) {
        TCGLabel *l1 = gen_new_label();

        /* Taken: execute the delay slot then jump to dest.
           Not taken: annul the delay slot, continue at npc + 4.  */
        tcg_gen_brcond_tl(tcg_invert_cond(cmp->cond), cmp->c1, cmp->c2, l1);
        gen_goto_tb(dc, 0, npc, dest);
        gen_set_label(l1);
        gen_goto_tb(dc, 1, npc + 4, npc + 8);

        dc->base.is_jmp = DISAS_NORETURN;
    } else {
        if (npc & 3) {
            switch (npc) {
            case DYNAMIC_PC:
            case DYNAMIC_PC_LOOKUP:
                /* npc is dynamic: select the new npc with a movcond.  */
                tcg_gen_mov_tl(cpu_pc, cpu_npc);
                tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
                tcg_gen_movcond_tl(cmp->cond, cpu_npc,
                                   cmp->c1, cmp->c2,
                                   tcg_constant_tl(dest), cpu_npc);
                dc->pc = npc;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Record both possible next pcs; cpu_cond holds the taken
               condition for the later resolution of JUMP_PC.  */
            dc->pc = npc;
            dc->jump_pc[0] = dest;
            dc->jump_pc[1] = npc + 4;
            dc->npc = JUMP_PC;
            if (cmp->is_bool) {
                tcg_gen_mov_tl(cpu_cond, cmp->c1);
            } else {
                tcg_gen_setcond_tl(cmp->cond, cpu_cond, cmp->c1, cmp->c2);
            }
        }
    }
    return true;
}
2949 
2950 static bool do_bpcc(DisasContext *dc, arg_bcc *a)
2951 {
2952     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
2953     DisasCompare cmp;
2954 
2955     switch (a->cond) {
2956     case 0x0:
2957         return advance_jump_uncond_never(dc, a->a);
2958     case 0x8:
2959         return advance_jump_uncond_always(dc, a->a, target);
2960     default:
2961         flush_cond(dc);
2962 
2963         gen_compare(&cmp, a->cc, a->cond, dc);
2964         return advance_jump_cond(dc, &cmp, a->a, target);
2965     }
2966 }
2967 
2968 TRANS(Bicc, ALL, do_bpcc, a)
2969 TRANS(BPcc,  64, do_bpcc, a)
2970 
2971 static bool do_fbpfcc(DisasContext *dc, arg_bcc *a)
2972 {
2973     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
2974     DisasCompare cmp;
2975 
2976     if (gen_trap_ifnofpu(dc)) {
2977         return true;
2978     }
2979     switch (a->cond) {
2980     case 0x0:
2981         return advance_jump_uncond_never(dc, a->a);
2982     case 0x8:
2983         return advance_jump_uncond_always(dc, a->a, target);
2984     default:
2985         flush_cond(dc);
2986 
2987         gen_fcompare(&cmp, a->cc, a->cond);
2988         return advance_jump_cond(dc, &cmp, a->a, target);
2989     }
2990 }
2991 
2992 TRANS(FBPfcc,  64, do_fbpfcc, a)
2993 TRANS(FBfcc,  ALL, do_fbpfcc, a)
2994 
2995 static bool trans_BPr(DisasContext *dc, arg_BPr *a)
2996 {
2997     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
2998     DisasCompare cmp;
2999 
3000     if (!avail_64(dc)) {
3001         return false;
3002     }
3003     if (gen_tcg_cond_reg[a->cond] == TCG_COND_NEVER) {
3004         return false;
3005     }
3006 
3007     flush_cond(dc);
3008     gen_compare_reg(&cmp, a->cond, gen_load_gpr(dc, a->rs1));
3009     return advance_jump_cond(dc, &cmp, a->a, target);
3010 }
3011 
3012 static bool trans_CALL(DisasContext *dc, arg_CALL *a)
3013 {
3014     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
3015 
3016     gen_store_gpr(dc, 15, tcg_constant_tl(dc->pc));
3017     gen_mov_pc_npc(dc);
3018     dc->npc = target;
3019     return true;
3020 }
3021 
3022 static bool trans_NCP(DisasContext *dc, arg_NCP *a)
3023 {
3024     /*
3025      * For sparc32, always generate the no-coprocessor exception.
3026      * For sparc64, always generate illegal instruction.
3027      */
3028 #ifdef TARGET_SPARC64
3029     return false;
3030 #else
3031     gen_exception(dc, TT_NCP_INSN);
3032     return true;
3033 #endif
3034 }
3035 
3036 static bool trans_SETHI(DisasContext *dc, arg_SETHI *a)
3037 {
3038     /* Special-case %g0 because that's the canonical nop.  */
3039     if (a->rd) {
3040         gen_store_gpr(dc, a->rd, tcg_constant_tl((uint32_t)a->i << 10));
3041     }
3042     return advance_pc(dc);
3043 }
3044 
/*
 * Bail out of disas_sparc_legacy if the CPU model lacks a feature.
 * Wrapped in do { } while (0) so the macros expand as a single
 * statement and cannot swallow an else at the call site.  They may
 * only be used inside a function providing the target labels.
 */
#define CHECK_IU_FEATURE(dc, FEATURE)                           \
    do {                                                        \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) { \
            goto illegal_insn;                                  \
        }                                                       \
    } while (0)
#define CHECK_FPU_FEATURE(dc, FEATURE)                          \
    do {                                                        \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) { \
            goto nfpu_insn;                                     \
        }                                                       \
    } while (0)
3051 
3052 /* before an instruction, dc->pc must be static */
3053 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3054 {
3055     unsigned int opc, rs1, rs2, rd;
3056     TCGv cpu_src1, cpu_src2;
3057     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3058     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3059     target_long simm;
3060 
3061     opc = GET_FIELD(insn, 0, 1);
3062     rd = GET_FIELD(insn, 2, 6);
3063 
3064     switch (opc) {
3065     case 0:
3066         goto illegal_insn; /* in decodetree */
3067     case 1:
3068         g_assert_not_reached(); /* in decodetree */
3069     case 2:                     /* FPU & Logical Operations */
3070         {
3071             unsigned int xop = GET_FIELD(insn, 7, 12);
3072             TCGv cpu_dst = tcg_temp_new();
3073             TCGv cpu_tmp0;
3074 
3075             if (xop == 0x3a) {  /* generate trap */
3076                 int cond = GET_FIELD(insn, 3, 6);
3077                 TCGv_i32 trap;
3078                 TCGLabel *l1 = NULL;
3079                 int mask;
3080 
3081                 if (cond == 0) {
3082                     /* Trap never.  */
3083                     break;
3084                 }
3085 
3086                 save_state(dc);
3087 
3088                 if (cond != 8) {
3089                     /* Conditional trap.  */
3090                     DisasCompare cmp;
3091 #ifdef TARGET_SPARC64
3092                     /* V9 icc/xcc */
3093                     int cc = GET_FIELD_SP(insn, 11, 12);
3094                     if (cc == 0) {
3095                         gen_compare(&cmp, 0, cond, dc);
3096                     } else if (cc == 2) {
3097                         gen_compare(&cmp, 1, cond, dc);
3098                     } else {
3099                         goto illegal_insn;
3100                     }
3101 #else
3102                     gen_compare(&cmp, 0, cond, dc);
3103 #endif
3104                     l1 = gen_new_label();
3105                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3106                                       cmp.c1, cmp.c2, l1);
3107                 }
3108 
3109                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3110                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3111 
3112                 /* Don't use the normal temporaries, as they may well have
3113                    gone out of scope with the branch above.  While we're
3114                    doing that we might as well pre-truncate to 32-bit.  */
3115                 trap = tcg_temp_new_i32();
3116 
3117                 rs1 = GET_FIELD_SP(insn, 14, 18);
3118                 if (IS_IMM) {
3119                     rs2 = GET_FIELD_SP(insn, 0, 7);
3120                     if (rs1 == 0) {
3121                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3122                         /* Signal that the trap value is fully constant.  */
3123                         mask = 0;
3124                     } else {
3125                         TCGv t1 = gen_load_gpr(dc, rs1);
3126                         tcg_gen_trunc_tl_i32(trap, t1);
3127                         tcg_gen_addi_i32(trap, trap, rs2);
3128                     }
3129                 } else {
3130                     TCGv t1, t2;
3131                     rs2 = GET_FIELD_SP(insn, 0, 4);
3132                     t1 = gen_load_gpr(dc, rs1);
3133                     t2 = gen_load_gpr(dc, rs2);
3134                     tcg_gen_add_tl(t1, t1, t2);
3135                     tcg_gen_trunc_tl_i32(trap, t1);
3136                 }
3137                 if (mask != 0) {
3138                     tcg_gen_andi_i32(trap, trap, mask);
3139                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3140                 }
3141 
3142                 gen_helper_raise_exception(tcg_env, trap);
3143 
3144                 if (cond == 8) {
3145                     /* An unconditional trap ends the TB.  */
3146                     dc->base.is_jmp = DISAS_NORETURN;
3147                     goto jmp_insn;
3148                 } else {
3149                     /* A conditional trap falls through to the next insn.  */
3150                     gen_set_label(l1);
3151                     break;
3152                 }
3153             } else if (xop == 0x28) {
3154                 rs1 = GET_FIELD(insn, 13, 17);
3155                 switch(rs1) {
3156                 case 0: /* rdy */
3157 #ifndef TARGET_SPARC64
3158                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3159                                        manual, rdy on the microSPARC
3160                                        II */
3161                 case 0x0f:          /* stbar in the SPARCv8 manual,
3162                                        rdy on the microSPARC II */
3163                 case 0x10 ... 0x1f: /* implementation-dependent in the
3164                                        SPARCv8 manual, rdy on the
3165                                        microSPARC II */
3166                     /* Read Asr17 */
3167                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3168                         TCGv t = gen_dest_gpr(dc, rd);
3169                         /* Read Asr17 for a Leon3 monoprocessor */
3170                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3171                         gen_store_gpr(dc, rd, t);
3172                         break;
3173                     }
3174 #endif
3175                     gen_store_gpr(dc, rd, cpu_y);
3176                     break;
3177 #ifdef TARGET_SPARC64
3178                 case 0x2: /* V9 rdccr */
3179                     update_psr(dc);
3180                     gen_helper_rdccr(cpu_dst, tcg_env);
3181                     gen_store_gpr(dc, rd, cpu_dst);
3182                     break;
3183                 case 0x3: /* V9 rdasi */
3184                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3185                     gen_store_gpr(dc, rd, cpu_dst);
3186                     break;
3187                 case 0x4: /* V9 rdtick */
3188                     {
3189                         TCGv_ptr r_tickptr;
3190                         TCGv_i32 r_const;
3191 
3192                         r_tickptr = tcg_temp_new_ptr();
3193                         r_const = tcg_constant_i32(dc->mem_idx);
3194                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3195                                        offsetof(CPUSPARCState, tick));
3196                         if (translator_io_start(&dc->base)) {
3197                             dc->base.is_jmp = DISAS_EXIT;
3198                         }
3199                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3200                                                   r_const);
3201                         gen_store_gpr(dc, rd, cpu_dst);
3202                     }
3203                     break;
3204                 case 0x5: /* V9 rdpc */
3205                     {
3206                         TCGv t = gen_dest_gpr(dc, rd);
3207                         if (unlikely(AM_CHECK(dc))) {
3208                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3209                         } else {
3210                             tcg_gen_movi_tl(t, dc->pc);
3211                         }
3212                         gen_store_gpr(dc, rd, t);
3213                     }
3214                     break;
3215                 case 0x6: /* V9 rdfprs */
3216                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3217                     gen_store_gpr(dc, rd, cpu_dst);
3218                     break;
3219                 case 0xf: /* V9 membar */
3220                     break; /* no effect */
3221                 case 0x13: /* Graphics Status */
3222                     if (gen_trap_ifnofpu(dc)) {
3223                         goto jmp_insn;
3224                     }
3225                     gen_store_gpr(dc, rd, cpu_gsr);
3226                     break;
3227                 case 0x16: /* Softint */
3228                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3229                                      offsetof(CPUSPARCState, softint));
3230                     gen_store_gpr(dc, rd, cpu_dst);
3231                     break;
3232                 case 0x17: /* Tick compare */
3233                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3234                     break;
3235                 case 0x18: /* System tick */
3236                     {
3237                         TCGv_ptr r_tickptr;
3238                         TCGv_i32 r_const;
3239 
3240                         r_tickptr = tcg_temp_new_ptr();
3241                         r_const = tcg_constant_i32(dc->mem_idx);
3242                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3243                                        offsetof(CPUSPARCState, stick));
3244                         if (translator_io_start(&dc->base)) {
3245                             dc->base.is_jmp = DISAS_EXIT;
3246                         }
3247                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3248                                                   r_const);
3249                         gen_store_gpr(dc, rd, cpu_dst);
3250                     }
3251                     break;
3252                 case 0x19: /* System tick compare */
3253                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3254                     break;
3255                 case 0x1a: /* UltraSPARC-T1 Strand status */
3256                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3257                      * this ASR as impl. dep
3258                      */
3259                     CHECK_IU_FEATURE(dc, HYPV);
3260                     {
3261                         TCGv t = gen_dest_gpr(dc, rd);
3262                         tcg_gen_movi_tl(t, 1UL);
3263                         gen_store_gpr(dc, rd, t);
3264                     }
3265                     break;
3266                 case 0x10: /* Performance Control */
3267                 case 0x11: /* Performance Instrumentation Counter */
3268                 case 0x12: /* Dispatch Control */
3269                 case 0x14: /* Softint set, WO */
3270                 case 0x15: /* Softint clear, WO */
3271 #endif
3272                 default:
3273                     goto illegal_insn;
3274                 }
3275 #if !defined(CONFIG_USER_ONLY)
3276             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3277 #ifndef TARGET_SPARC64
3278                 if (!supervisor(dc)) {
3279                     goto priv_insn;
3280                 }
3281                 update_psr(dc);
3282                 gen_helper_rdpsr(cpu_dst, tcg_env);
3283 #else
3284                 CHECK_IU_FEATURE(dc, HYPV);
3285                 if (!hypervisor(dc))
3286                     goto priv_insn;
3287                 rs1 = GET_FIELD(insn, 13, 17);
3288                 switch (rs1) {
3289                 case 0: // hpstate
3290                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3291                                    offsetof(CPUSPARCState, hpstate));
3292                     break;
3293                 case 1: // htstate
3294                     // gen_op_rdhtstate();
3295                     break;
3296                 case 3: // hintp
3297                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3298                     break;
3299                 case 5: // htba
3300                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3301                     break;
3302                 case 6: // hver
3303                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3304                     break;
3305                 case 31: // hstick_cmpr
3306                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3307                     break;
3308                 default:
3309                     goto illegal_insn;
3310                 }
3311 #endif
3312                 gen_store_gpr(dc, rd, cpu_dst);
3313                 break;
3314             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3315                 if (!supervisor(dc)) {
3316                     goto priv_insn;
3317                 }
3318                 cpu_tmp0 = tcg_temp_new();
3319 #ifdef TARGET_SPARC64
3320                 rs1 = GET_FIELD(insn, 13, 17);
3321                 switch (rs1) {
3322                 case 0: // tpc
3323                     {
3324                         TCGv_ptr r_tsptr;
3325 
3326                         r_tsptr = tcg_temp_new_ptr();
3327                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3328                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3329                                       offsetof(trap_state, tpc));
3330                     }
3331                     break;
3332                 case 1: // tnpc
3333                     {
3334                         TCGv_ptr r_tsptr;
3335 
3336                         r_tsptr = tcg_temp_new_ptr();
3337                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3338                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3339                                       offsetof(trap_state, tnpc));
3340                     }
3341                     break;
3342                 case 2: // tstate
3343                     {
3344                         TCGv_ptr r_tsptr;
3345 
3346                         r_tsptr = tcg_temp_new_ptr();
3347                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3348                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3349                                       offsetof(trap_state, tstate));
3350                     }
3351                     break;
3352                 case 3: // tt
3353                     {
3354                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3355 
3356                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3357                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3358                                          offsetof(trap_state, tt));
3359                     }
3360                     break;
3361                 case 4: // tick
3362                     {
3363                         TCGv_ptr r_tickptr;
3364                         TCGv_i32 r_const;
3365 
3366                         r_tickptr = tcg_temp_new_ptr();
3367                         r_const = tcg_constant_i32(dc->mem_idx);
3368                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3369                                        offsetof(CPUSPARCState, tick));
3370                         if (translator_io_start(&dc->base)) {
3371                             dc->base.is_jmp = DISAS_EXIT;
3372                         }
3373                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3374                                                   r_tickptr, r_const);
3375                     }
3376                     break;
3377                 case 5: // tba
3378                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3379                     break;
3380                 case 6: // pstate
3381                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3382                                      offsetof(CPUSPARCState, pstate));
3383                     break;
3384                 case 7: // tl
3385                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3386                                      offsetof(CPUSPARCState, tl));
3387                     break;
3388                 case 8: // pil
3389                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3390                                      offsetof(CPUSPARCState, psrpil));
3391                     break;
3392                 case 9: // cwp
3393                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3394                     break;
3395                 case 10: // cansave
3396                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3397                                      offsetof(CPUSPARCState, cansave));
3398                     break;
3399                 case 11: // canrestore
3400                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3401                                      offsetof(CPUSPARCState, canrestore));
3402                     break;
3403                 case 12: // cleanwin
3404                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3405                                      offsetof(CPUSPARCState, cleanwin));
3406                     break;
3407                 case 13: // otherwin
3408                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3409                                      offsetof(CPUSPARCState, otherwin));
3410                     break;
3411                 case 14: // wstate
3412                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3413                                      offsetof(CPUSPARCState, wstate));
3414                     break;
3415                 case 16: // UA2005 gl
3416                     CHECK_IU_FEATURE(dc, GL);
3417                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3418                                      offsetof(CPUSPARCState, gl));
3419                     break;
3420                 case 26: // UA2005 strand status
3421                     CHECK_IU_FEATURE(dc, HYPV);
3422                     if (!hypervisor(dc))
3423                         goto priv_insn;
3424                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3425                     break;
3426                 case 31: // ver
3427                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3428                     break;
3429                 case 15: // fq
3430                 default:
3431                     goto illegal_insn;
3432                 }
3433 #else
3434                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3435 #endif
3436                 gen_store_gpr(dc, rd, cpu_tmp0);
3437                 break;
3438 #endif
3439 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3440             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3441 #ifdef TARGET_SPARC64
3442                 gen_helper_flushw(tcg_env);
3443 #else
3444                 if (!supervisor(dc))
3445                     goto priv_insn;
3446                 gen_store_gpr(dc, rd, cpu_tbr);
3447 #endif
3448                 break;
3449 #endif
3450             } else if (xop == 0x34) {   /* FPU Operations */
3451                 if (gen_trap_ifnofpu(dc)) {
3452                     goto jmp_insn;
3453                 }
3454                 gen_op_clear_ieee_excp_and_FTT();
3455                 rs1 = GET_FIELD(insn, 13, 17);
3456                 rs2 = GET_FIELD(insn, 27, 31);
3457                 xop = GET_FIELD(insn, 18, 26);
3458 
3459                 switch (xop) {
3460                 case 0x1: /* fmovs */
3461                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3462                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3463                     break;
3464                 case 0x5: /* fnegs */
3465                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3466                     break;
3467                 case 0x9: /* fabss */
3468                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3469                     break;
3470                 case 0x29: /* fsqrts */
3471                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3472                     break;
3473                 case 0x2a: /* fsqrtd */
3474                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3475                     break;
3476                 case 0x2b: /* fsqrtq */
3477                     CHECK_FPU_FEATURE(dc, FLOAT128);
3478                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3479                     break;
3480                 case 0x41: /* fadds */
3481                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3482                     break;
3483                 case 0x42: /* faddd */
3484                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3485                     break;
3486                 case 0x43: /* faddq */
3487                     CHECK_FPU_FEATURE(dc, FLOAT128);
3488                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3489                     break;
3490                 case 0x45: /* fsubs */
3491                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3492                     break;
3493                 case 0x46: /* fsubd */
3494                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3495                     break;
3496                 case 0x47: /* fsubq */
3497                     CHECK_FPU_FEATURE(dc, FLOAT128);
3498                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3499                     break;
3500                 case 0x49: /* fmuls */
3501                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3502                     break;
3503                 case 0x4a: /* fmuld */
3504                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3505                     break;
3506                 case 0x4b: /* fmulq */
3507                     CHECK_FPU_FEATURE(dc, FLOAT128);
3508                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3509                     break;
3510                 case 0x4d: /* fdivs */
3511                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3512                     break;
3513                 case 0x4e: /* fdivd */
3514                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3515                     break;
3516                 case 0x4f: /* fdivq */
3517                     CHECK_FPU_FEATURE(dc, FLOAT128);
3518                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3519                     break;
3520                 case 0x69: /* fsmuld */
3521                     CHECK_FPU_FEATURE(dc, FSMULD);
3522                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3523                     break;
3524                 case 0x6e: /* fdmulq */
3525                     CHECK_FPU_FEATURE(dc, FLOAT128);
3526                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3527                     break;
3528                 case 0xc4: /* fitos */
3529                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3530                     break;
3531                 case 0xc6: /* fdtos */
3532                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3533                     break;
3534                 case 0xc7: /* fqtos */
3535                     CHECK_FPU_FEATURE(dc, FLOAT128);
3536                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3537                     break;
3538                 case 0xc8: /* fitod */
3539                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3540                     break;
3541                 case 0xc9: /* fstod */
3542                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3543                     break;
3544                 case 0xcb: /* fqtod */
3545                     CHECK_FPU_FEATURE(dc, FLOAT128);
3546                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3547                     break;
3548                 case 0xcc: /* fitoq */
3549                     CHECK_FPU_FEATURE(dc, FLOAT128);
3550                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3551                     break;
3552                 case 0xcd: /* fstoq */
3553                     CHECK_FPU_FEATURE(dc, FLOAT128);
3554                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3555                     break;
3556                 case 0xce: /* fdtoq */
3557                     CHECK_FPU_FEATURE(dc, FLOAT128);
3558                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3559                     break;
3560                 case 0xd1: /* fstoi */
3561                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3562                     break;
3563                 case 0xd2: /* fdtoi */
3564                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3565                     break;
3566                 case 0xd3: /* fqtoi */
3567                     CHECK_FPU_FEATURE(dc, FLOAT128);
3568                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3569                     break;
3570 #ifdef TARGET_SPARC64
3571                 case 0x2: /* V9 fmovd */
3572                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3573                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3574                     break;
3575                 case 0x3: /* V9 fmovq */
3576                     CHECK_FPU_FEATURE(dc, FLOAT128);
3577                     gen_move_Q(dc, rd, rs2);
3578                     break;
3579                 case 0x6: /* V9 fnegd */
3580                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3581                     break;
3582                 case 0x7: /* V9 fnegq */
3583                     CHECK_FPU_FEATURE(dc, FLOAT128);
3584                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3585                     break;
3586                 case 0xa: /* V9 fabsd */
3587                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3588                     break;
3589                 case 0xb: /* V9 fabsq */
3590                     CHECK_FPU_FEATURE(dc, FLOAT128);
3591                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3592                     break;
3593                 case 0x81: /* V9 fstox */
3594                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3595                     break;
3596                 case 0x82: /* V9 fdtox */
3597                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3598                     break;
3599                 case 0x83: /* V9 fqtox */
3600                     CHECK_FPU_FEATURE(dc, FLOAT128);
3601                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3602                     break;
3603                 case 0x84: /* V9 fxtos */
3604                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3605                     break;
3606                 case 0x88: /* V9 fxtod */
3607                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3608                     break;
3609                 case 0x8c: /* V9 fxtoq */
3610                     CHECK_FPU_FEATURE(dc, FLOAT128);
3611                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3612                     break;
3613 #endif
3614                 default:
3615                     goto illegal_insn;
3616                 }
3617             } else if (xop == 0x35) {   /* FPU Operations */
3618 #ifdef TARGET_SPARC64
3619                 int cond;
3620 #endif
3621                 if (gen_trap_ifnofpu(dc)) {
3622                     goto jmp_insn;
3623                 }
3624                 gen_op_clear_ieee_excp_and_FTT();
3625                 rs1 = GET_FIELD(insn, 13, 17);
3626                 rs2 = GET_FIELD(insn, 27, 31);
3627                 xop = GET_FIELD(insn, 18, 26);
3628 
3629 #ifdef TARGET_SPARC64
3630 #define FMOVR(sz)                                                  \
3631                 do {                                               \
3632                     DisasCompare cmp;                              \
3633                     cond = GET_FIELD_SP(insn, 10, 12);             \
3634                     cpu_src1 = get_src1(dc, insn);                 \
3635                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3636                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3637                 } while (0)
3638 
3639                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3640                     FMOVR(s);
3641                     break;
3642                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3643                     FMOVR(d);
3644                     break;
3645                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3646                     CHECK_FPU_FEATURE(dc, FLOAT128);
3647                     FMOVR(q);
3648                     break;
3649                 }
3650 #undef FMOVR
3651 #endif
3652                 switch (xop) {
3653 #ifdef TARGET_SPARC64
3654 #define FMOVCC(fcc, sz)                                                 \
3655                     do {                                                \
3656                         DisasCompare cmp;                               \
3657                         cond = GET_FIELD_SP(insn, 14, 17);              \
3658                         gen_fcompare(&cmp, fcc, cond);                  \
3659                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3660                     } while (0)
3661 
3662                     case 0x001: /* V9 fmovscc %fcc0 */
3663                         FMOVCC(0, s);
3664                         break;
3665                     case 0x002: /* V9 fmovdcc %fcc0 */
3666                         FMOVCC(0, d);
3667                         break;
3668                     case 0x003: /* V9 fmovqcc %fcc0 */
3669                         CHECK_FPU_FEATURE(dc, FLOAT128);
3670                         FMOVCC(0, q);
3671                         break;
3672                     case 0x041: /* V9 fmovscc %fcc1 */
3673                         FMOVCC(1, s);
3674                         break;
3675                     case 0x042: /* V9 fmovdcc %fcc1 */
3676                         FMOVCC(1, d);
3677                         break;
3678                     case 0x043: /* V9 fmovqcc %fcc1 */
3679                         CHECK_FPU_FEATURE(dc, FLOAT128);
3680                         FMOVCC(1, q);
3681                         break;
3682                     case 0x081: /* V9 fmovscc %fcc2 */
3683                         FMOVCC(2, s);
3684                         break;
3685                     case 0x082: /* V9 fmovdcc %fcc2 */
3686                         FMOVCC(2, d);
3687                         break;
3688                     case 0x083: /* V9 fmovqcc %fcc2 */
3689                         CHECK_FPU_FEATURE(dc, FLOAT128);
3690                         FMOVCC(2, q);
3691                         break;
3692                     case 0x0c1: /* V9 fmovscc %fcc3 */
3693                         FMOVCC(3, s);
3694                         break;
3695                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3696                         FMOVCC(3, d);
3697                         break;
3698                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3699                         CHECK_FPU_FEATURE(dc, FLOAT128);
3700                         FMOVCC(3, q);
3701                         break;
3702 #undef FMOVCC
3703 #define FMOVCC(xcc, sz)                                                 \
3704                     do {                                                \
3705                         DisasCompare cmp;                               \
3706                         cond = GET_FIELD_SP(insn, 14, 17);              \
3707                         gen_compare(&cmp, xcc, cond, dc);               \
3708                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3709                     } while (0)
3710 
3711                     case 0x101: /* V9 fmovscc %icc */
3712                         FMOVCC(0, s);
3713                         break;
3714                     case 0x102: /* V9 fmovdcc %icc */
3715                         FMOVCC(0, d);
3716                         break;
3717                     case 0x103: /* V9 fmovqcc %icc */
3718                         CHECK_FPU_FEATURE(dc, FLOAT128);
3719                         FMOVCC(0, q);
3720                         break;
3721                     case 0x181: /* V9 fmovscc %xcc */
3722                         FMOVCC(1, s);
3723                         break;
3724                     case 0x182: /* V9 fmovdcc %xcc */
3725                         FMOVCC(1, d);
3726                         break;
3727                     case 0x183: /* V9 fmovqcc %xcc */
3728                         CHECK_FPU_FEATURE(dc, FLOAT128);
3729                         FMOVCC(1, q);
3730                         break;
3731 #undef FMOVCC
3732 #endif
3733                     case 0x51: /* fcmps, V9 %fcc */
3734                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3735                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3736                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3737                         break;
3738                     case 0x52: /* fcmpd, V9 %fcc */
3739                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3740                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3741                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3742                         break;
3743                     case 0x53: /* fcmpq, V9 %fcc */
3744                         CHECK_FPU_FEATURE(dc, FLOAT128);
3745                         gen_op_load_fpr_QT0(QFPREG(rs1));
3746                         gen_op_load_fpr_QT1(QFPREG(rs2));
3747                         gen_op_fcmpq(rd & 3);
3748                         break;
3749                     case 0x55: /* fcmpes, V9 %fcc */
3750                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3751                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3752                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3753                         break;
3754                     case 0x56: /* fcmped, V9 %fcc */
3755                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3756                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3757                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3758                         break;
3759                     case 0x57: /* fcmpeq, V9 %fcc */
3760                         CHECK_FPU_FEATURE(dc, FLOAT128);
3761                         gen_op_load_fpr_QT0(QFPREG(rs1));
3762                         gen_op_load_fpr_QT1(QFPREG(rs2));
3763                         gen_op_fcmpeq(rd & 3);
3764                         break;
3765                     default:
3766                         goto illegal_insn;
3767                 }
3768             } else if (xop == 0x2) {
3769                 TCGv dst = gen_dest_gpr(dc, rd);
3770                 rs1 = GET_FIELD(insn, 13, 17);
3771                 if (rs1 == 0) {
3772                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3773                     if (IS_IMM) {       /* immediate */
3774                         simm = GET_FIELDs(insn, 19, 31);
3775                         tcg_gen_movi_tl(dst, simm);
3776                         gen_store_gpr(dc, rd, dst);
3777                     } else {            /* register */
3778                         rs2 = GET_FIELD(insn, 27, 31);
3779                         if (rs2 == 0) {
3780                             tcg_gen_movi_tl(dst, 0);
3781                             gen_store_gpr(dc, rd, dst);
3782                         } else {
3783                             cpu_src2 = gen_load_gpr(dc, rs2);
3784                             gen_store_gpr(dc, rd, cpu_src2);
3785                         }
3786                     }
3787                 } else {
3788                     cpu_src1 = get_src1(dc, insn);
3789                     if (IS_IMM) {       /* immediate */
3790                         simm = GET_FIELDs(insn, 19, 31);
3791                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3792                         gen_store_gpr(dc, rd, dst);
3793                     } else {            /* register */
3794                         rs2 = GET_FIELD(insn, 27, 31);
3795                         if (rs2 == 0) {
3796                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3797                             gen_store_gpr(dc, rd, cpu_src1);
3798                         } else {
3799                             cpu_src2 = gen_load_gpr(dc, rs2);
3800                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3801                             gen_store_gpr(dc, rd, dst);
3802                         }
3803                     }
3804                 }
3805 #ifdef TARGET_SPARC64
3806             } else if (xop == 0x25) { /* sll, V9 sllx */
3807                 cpu_src1 = get_src1(dc, insn);
3808                 if (IS_IMM) {   /* immediate */
3809                     simm = GET_FIELDs(insn, 20, 31);
3810                     if (insn & (1 << 12)) {
3811                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3812                     } else {
3813                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3814                     }
3815                 } else {                /* register */
3816                     rs2 = GET_FIELD(insn, 27, 31);
3817                     cpu_src2 = gen_load_gpr(dc, rs2);
3818                     cpu_tmp0 = tcg_temp_new();
3819                     if (insn & (1 << 12)) {
3820                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3821                     } else {
3822                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3823                     }
3824                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3825                 }
3826                 gen_store_gpr(dc, rd, cpu_dst);
3827             } else if (xop == 0x26) { /* srl, V9 srlx */
3828                 cpu_src1 = get_src1(dc, insn);
3829                 if (IS_IMM) {   /* immediate */
3830                     simm = GET_FIELDs(insn, 20, 31);
3831                     if (insn & (1 << 12)) {
3832                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3833                     } else {
3834                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3835                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3836                     }
3837                 } else {                /* register */
3838                     rs2 = GET_FIELD(insn, 27, 31);
3839                     cpu_src2 = gen_load_gpr(dc, rs2);
3840                     cpu_tmp0 = tcg_temp_new();
3841                     if (insn & (1 << 12)) {
3842                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3843                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3844                     } else {
3845                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3846                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3847                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3848                     }
3849                 }
3850                 gen_store_gpr(dc, rd, cpu_dst);
3851             } else if (xop == 0x27) { /* sra, V9 srax */
3852                 cpu_src1 = get_src1(dc, insn);
3853                 if (IS_IMM) {   /* immediate */
3854                     simm = GET_FIELDs(insn, 20, 31);
3855                     if (insn & (1 << 12)) {
3856                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3857                     } else {
3858                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3859                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3860                     }
3861                 } else {                /* register */
3862                     rs2 = GET_FIELD(insn, 27, 31);
3863                     cpu_src2 = gen_load_gpr(dc, rs2);
3864                     cpu_tmp0 = tcg_temp_new();
3865                     if (insn & (1 << 12)) {
3866                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3867                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3868                     } else {
3869                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3870                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3871                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3872                     }
3873                 }
3874                 gen_store_gpr(dc, rd, cpu_dst);
3875 #endif
3876             } else if (xop < 0x36) {
3877                 if (xop < 0x20) {
3878                     cpu_src1 = get_src1(dc, insn);
3879                     cpu_src2 = get_src2(dc, insn);
3880                     switch (xop & ~0x10) {
3881                     case 0x0: /* add */
3882                         if (xop & 0x10) {
3883                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3884                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3885                             dc->cc_op = CC_OP_ADD;
3886                         } else {
3887                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3888                         }
3889                         break;
3890                     case 0x1: /* and */
3891                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3892                         if (xop & 0x10) {
3893                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3894                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3895                             dc->cc_op = CC_OP_LOGIC;
3896                         }
3897                         break;
3898                     case 0x2: /* or */
3899                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3900                         if (xop & 0x10) {
3901                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3902                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3903                             dc->cc_op = CC_OP_LOGIC;
3904                         }
3905                         break;
3906                     case 0x3: /* xor */
3907                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3908                         if (xop & 0x10) {
3909                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3910                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3911                             dc->cc_op = CC_OP_LOGIC;
3912                         }
3913                         break;
3914                     case 0x4: /* sub */
3915                         if (xop & 0x10) {
3916                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3917                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3918                             dc->cc_op = CC_OP_SUB;
3919                         } else {
3920                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3921                         }
3922                         break;
3923                     case 0x5: /* andn */
3924                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3925                         if (xop & 0x10) {
3926                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3927                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3928                             dc->cc_op = CC_OP_LOGIC;
3929                         }
3930                         break;
3931                     case 0x6: /* orn */
3932                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3933                         if (xop & 0x10) {
3934                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3935                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3936                             dc->cc_op = CC_OP_LOGIC;
3937                         }
3938                         break;
3939                     case 0x7: /* xorn */
3940                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3941                         if (xop & 0x10) {
3942                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3943                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3944                             dc->cc_op = CC_OP_LOGIC;
3945                         }
3946                         break;
3947                     case 0x8: /* addx, V9 addc */
3948                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3949                                         (xop & 0x10));
3950                         break;
3951 #ifdef TARGET_SPARC64
3952                     case 0x9: /* V9 mulx */
3953                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3954                         break;
3955 #endif
3956                     case 0xa: /* umul */
3957                         CHECK_IU_FEATURE(dc, MUL);
3958                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3959                         if (xop & 0x10) {
3960                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3961                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3962                             dc->cc_op = CC_OP_LOGIC;
3963                         }
3964                         break;
3965                     case 0xb: /* smul */
3966                         CHECK_IU_FEATURE(dc, MUL);
3967                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3968                         if (xop & 0x10) {
3969                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3970                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3971                             dc->cc_op = CC_OP_LOGIC;
3972                         }
3973                         break;
3974                     case 0xc: /* subx, V9 subc */
3975                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3976                                         (xop & 0x10));
3977                         break;
3978 #ifdef TARGET_SPARC64
3979                     case 0xd: /* V9 udivx */
3980                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
3981                         break;
3982 #endif
3983                     case 0xe: /* udiv */
3984                         CHECK_IU_FEATURE(dc, DIV);
3985                         if (xop & 0x10) {
3986                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
3987                                                cpu_src2);
3988                             dc->cc_op = CC_OP_DIV;
3989                         } else {
3990                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
3991                                             cpu_src2);
3992                         }
3993                         break;
3994                     case 0xf: /* sdiv */
3995                         CHECK_IU_FEATURE(dc, DIV);
3996                         if (xop & 0x10) {
3997                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
3998                                                cpu_src2);
3999                             dc->cc_op = CC_OP_DIV;
4000                         } else {
4001                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
4002                                             cpu_src2);
4003                         }
4004                         break;
4005                     default:
4006                         goto illegal_insn;
4007                     }
4008                     gen_store_gpr(dc, rd, cpu_dst);
4009                 } else {
4010                     cpu_src1 = get_src1(dc, insn);
4011                     cpu_src2 = get_src2(dc, insn);
4012                     switch (xop) {
4013                     case 0x20: /* taddcc */
4014                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4015                         gen_store_gpr(dc, rd, cpu_dst);
4016                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4017                         dc->cc_op = CC_OP_TADD;
4018                         break;
4019                     case 0x21: /* tsubcc */
4020                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4021                         gen_store_gpr(dc, rd, cpu_dst);
4022                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4023                         dc->cc_op = CC_OP_TSUB;
4024                         break;
4025                     case 0x22: /* taddcctv */
4026                         gen_helper_taddcctv(cpu_dst, tcg_env,
4027                                             cpu_src1, cpu_src2);
4028                         gen_store_gpr(dc, rd, cpu_dst);
4029                         dc->cc_op = CC_OP_TADDTV;
4030                         break;
4031                     case 0x23: /* tsubcctv */
4032                         gen_helper_tsubcctv(cpu_dst, tcg_env,
4033                                             cpu_src1, cpu_src2);
4034                         gen_store_gpr(dc, rd, cpu_dst);
4035                         dc->cc_op = CC_OP_TSUBTV;
4036                         break;
4037                     case 0x24: /* mulscc */
4038                         update_psr(dc);
4039                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4040                         gen_store_gpr(dc, rd, cpu_dst);
4041                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4042                         dc->cc_op = CC_OP_ADD;
4043                         break;
4044 #ifndef TARGET_SPARC64
4045                     case 0x25:  /* sll */
4046                         if (IS_IMM) { /* immediate */
4047                             simm = GET_FIELDs(insn, 20, 31);
4048                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4049                         } else { /* register */
4050                             cpu_tmp0 = tcg_temp_new();
4051                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4052                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4053                         }
4054                         gen_store_gpr(dc, rd, cpu_dst);
4055                         break;
4056                     case 0x26:  /* srl */
4057                         if (IS_IMM) { /* immediate */
4058                             simm = GET_FIELDs(insn, 20, 31);
4059                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4060                         } else { /* register */
4061                             cpu_tmp0 = tcg_temp_new();
4062                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4063                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4064                         }
4065                         gen_store_gpr(dc, rd, cpu_dst);
4066                         break;
4067                     case 0x27:  /* sra */
4068                         if (IS_IMM) { /* immediate */
4069                             simm = GET_FIELDs(insn, 20, 31);
4070                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4071                         } else { /* register */
4072                             cpu_tmp0 = tcg_temp_new();
4073                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4074                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4075                         }
4076                         gen_store_gpr(dc, rd, cpu_dst);
4077                         break;
4078 #endif
4079                     case 0x30:
4080                         {
4081                             cpu_tmp0 = tcg_temp_new();
4082                             switch(rd) {
4083                             case 0: /* wry */
4084                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4085                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4086                                 break;
4087 #ifndef TARGET_SPARC64
4088                             case 0x01 ... 0x0f: /* undefined in the
4089                                                    SPARCv8 manual, nop
4090                                                    on the microSPARC
4091                                                    II */
4092                             case 0x10 ... 0x1f: /* implementation-dependent
4093                                                    in the SPARCv8
4094                                                    manual, nop on the
4095                                                    microSPARC II */
4096                                 if ((rd == 0x13) && (dc->def->features &
4097                                                      CPU_FEATURE_POWERDOWN)) {
4098                                     /* LEON3 power-down */
4099                                     save_state(dc);
4100                                     gen_helper_power_down(tcg_env);
4101                                 }
4102                                 break;
4103 #else
4104                             case 0x2: /* V9 wrccr */
4105                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4106                                 gen_helper_wrccr(tcg_env, cpu_tmp0);
4107                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4108                                 dc->cc_op = CC_OP_FLAGS;
4109                                 break;
4110                             case 0x3: /* V9 wrasi */
4111                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4112                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4113                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4114                                                 offsetof(CPUSPARCState, asi));
4115                                 /*
4116                                  * End TB to notice changed ASI.
4117                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4118                                  * update DisasContext and not exit the TB.
4119                                  */
4120                                 save_state(dc);
4121                                 gen_op_next_insn();
4122                                 tcg_gen_lookup_and_goto_ptr();
4123                                 dc->base.is_jmp = DISAS_NORETURN;
4124                                 break;
4125                             case 0x6: /* V9 wrfprs */
4126                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4127                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4128                                 dc->fprs_dirty = 0;
4129                                 save_state(dc);
4130                                 gen_op_next_insn();
4131                                 tcg_gen_exit_tb(NULL, 0);
4132                                 dc->base.is_jmp = DISAS_NORETURN;
4133                                 break;
4134                             case 0xf: /* V9 sir, nop if user */
4135 #if !defined(CONFIG_USER_ONLY)
4136                                 if (supervisor(dc)) {
4137                                     ; // XXX
4138                                 }
4139 #endif
4140                                 break;
4141                             case 0x13: /* Graphics Status */
4142                                 if (gen_trap_ifnofpu(dc)) {
4143                                     goto jmp_insn;
4144                                 }
4145                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4146                                 break;
4147                             case 0x14: /* Softint set */
4148                                 if (!supervisor(dc))
4149                                     goto illegal_insn;
4150                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4151                                 gen_helper_set_softint(tcg_env, cpu_tmp0);
4152                                 break;
4153                             case 0x15: /* Softint clear */
4154                                 if (!supervisor(dc))
4155                                     goto illegal_insn;
4156                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4157                                 gen_helper_clear_softint(tcg_env, cpu_tmp0);
4158                                 break;
4159                             case 0x16: /* Softint write */
4160                                 if (!supervisor(dc))
4161                                     goto illegal_insn;
4162                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4163                                 gen_helper_write_softint(tcg_env, cpu_tmp0);
4164                                 break;
4165                             case 0x17: /* Tick compare */
4166 #if !defined(CONFIG_USER_ONLY)
4167                                 if (!supervisor(dc))
4168                                     goto illegal_insn;
4169 #endif
4170                                 {
4171                                     TCGv_ptr r_tickptr;
4172 
4173                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4174                                                    cpu_src2);
4175                                     r_tickptr = tcg_temp_new_ptr();
4176                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4177                                                    offsetof(CPUSPARCState, tick));
4178                                     translator_io_start(&dc->base);
4179                                     gen_helper_tick_set_limit(r_tickptr,
4180                                                               cpu_tick_cmpr);
4181                                     /* End TB to handle timer interrupt */
4182                                     dc->base.is_jmp = DISAS_EXIT;
4183                                 }
4184                                 break;
4185                             case 0x18: /* System tick */
4186 #if !defined(CONFIG_USER_ONLY)
4187                                 if (!supervisor(dc))
4188                                     goto illegal_insn;
4189 #endif
4190                                 {
4191                                     TCGv_ptr r_tickptr;
4192 
4193                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4194                                                    cpu_src2);
4195                                     r_tickptr = tcg_temp_new_ptr();
4196                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4197                                                    offsetof(CPUSPARCState, stick));
4198                                     translator_io_start(&dc->base);
4199                                     gen_helper_tick_set_count(r_tickptr,
4200                                                               cpu_tmp0);
4201                                     /* End TB to handle timer interrupt */
4202                                     dc->base.is_jmp = DISAS_EXIT;
4203                                 }
4204                                 break;
4205                             case 0x19: /* System tick compare */
4206 #if !defined(CONFIG_USER_ONLY)
4207                                 if (!supervisor(dc))
4208                                     goto illegal_insn;
4209 #endif
4210                                 {
4211                                     TCGv_ptr r_tickptr;
4212 
4213                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4214                                                    cpu_src2);
4215                                     r_tickptr = tcg_temp_new_ptr();
4216                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4217                                                    offsetof(CPUSPARCState, stick));
4218                                     translator_io_start(&dc->base);
4219                                     gen_helper_tick_set_limit(r_tickptr,
4220                                                               cpu_stick_cmpr);
4221                                     /* End TB to handle timer interrupt */
4222                                     dc->base.is_jmp = DISAS_EXIT;
4223                                 }
4224                                 break;
4225 
4226                             case 0x10: /* Performance Control */
4227                             case 0x11: /* Performance Instrumentation
4228                                           Counter */
4229                             case 0x12: /* Dispatch Control */
4230 #endif
4231                             default:
4232                                 goto illegal_insn;
4233                             }
4234                         }
4235                         break;
4236 #if !defined(CONFIG_USER_ONLY)
4237                     case 0x31: /* wrpsr, V9 saved, restored */
4238                         {
4239                             if (!supervisor(dc))
4240                                 goto priv_insn;
4241 #ifdef TARGET_SPARC64
4242                             switch (rd) {
4243                             case 0:
4244                                 gen_helper_saved(tcg_env);
4245                                 break;
4246                             case 1:
4247                                 gen_helper_restored(tcg_env);
4248                                 break;
4249                             case 2: /* UA2005 allclean */
4250                             case 3: /* UA2005 otherw */
4251                             case 4: /* UA2005 normalw */
4252                             case 5: /* UA2005 invalw */
4253                                 // XXX
4254                             default:
4255                                 goto illegal_insn;
4256                             }
4257 #else
4258                             cpu_tmp0 = tcg_temp_new();
4259                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4260                             gen_helper_wrpsr(tcg_env, cpu_tmp0);
4261                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4262                             dc->cc_op = CC_OP_FLAGS;
4263                             save_state(dc);
4264                             gen_op_next_insn();
4265                             tcg_gen_exit_tb(NULL, 0);
4266                             dc->base.is_jmp = DISAS_NORETURN;
4267 #endif
4268                         }
4269                         break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            /* Privileged: pre-V9 writes the Window Invalid
                               Mask; V9 writes the wrpr register selected by
                               rd.  Non-supervisor access traps. */
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* All wr-style insns store rs1 ^ operand2;
                               cpu_src2 (register or immediate) was prepared
                               earlier in this function. */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    /* Store into the trap_state entry for
                                       the current trap level (TL). */
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    /* tt is a 32-bit field, hence st32. */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* Sync pc/npc before the helper, then mark
                                   npc dynamic: wrpstate changes processor
                                   state that translation depends on. */
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* PIL gates interrupts: if icount is in use
                                   the TB is ended to re-check pending IRQs. */
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                /* Hyperprivileged on top of supervisor. */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* Pre-V9 wrwim: keep only one bit per register
                               window implemented by this CPU model. */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* Pre-V9: privileged write of the trap base
                               register (rs1 ^ operand2, like all wr insns). */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hyperprivileged registers,
                               selected by rd; requires hypervisor mode. */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* End the TB after the write and return to
                                   the main loop at the next insn. */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                                /* fallthrough: writing hver is illegal */
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
4459 #endif
4460 #ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 selects integer condition codes
                               (cc==0 -> icc, cc==2 -> xcc, others illegal);
                               clear means one of the fcc fields. */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd = cond ? src2 : rd (old value kept when
                               the condition is false). */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* Signed 64-bit divide via helper (takes tcg_env,
                           presumably so it can raise a division trap --
                           see the helper implementation). */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of src2 only (rs1 is unused). */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            /* Conditional move on the contents of rs1
                               (compared against zero), not on cond codes. */
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* rd = cond ? src2 : rd (old value kept when
                               the condition is false). */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4526 #endif
4527                     default:
4528                         goto illegal_insn;
4529                     }
4530                 }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                /* VIS dispatch: the opf field selects the operation;
                   rs1/rs2 are re-extracted here.  All of these ops need
                   the FPU enabled. */
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    /* FPU disabled: the trap has been emitted, skip
                       code generation for this insn. */
                    goto jmp_insn;
                }

                switch (opf) {
4541                 case 0x000: /* VIS I edge8cc */
4542                     CHECK_FPU_FEATURE(dc, VIS1);
4543                     cpu_src1 = gen_load_gpr(dc, rs1);
4544                     cpu_src2 = gen_load_gpr(dc, rs2);
4545                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4546                     gen_store_gpr(dc, rd, cpu_dst);
4547                     break;
4548                 case 0x001: /* VIS II edge8n */
4549                     CHECK_FPU_FEATURE(dc, VIS2);
4550                     cpu_src1 = gen_load_gpr(dc, rs1);
4551                     cpu_src2 = gen_load_gpr(dc, rs2);
4552                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4553                     gen_store_gpr(dc, rd, cpu_dst);
4554                     break;
4555                 case 0x002: /* VIS I edge8lcc */
4556                     CHECK_FPU_FEATURE(dc, VIS1);
4557                     cpu_src1 = gen_load_gpr(dc, rs1);
4558                     cpu_src2 = gen_load_gpr(dc, rs2);
4559                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4560                     gen_store_gpr(dc, rd, cpu_dst);
4561                     break;
4562                 case 0x003: /* VIS II edge8ln */
4563                     CHECK_FPU_FEATURE(dc, VIS2);
4564                     cpu_src1 = gen_load_gpr(dc, rs1);
4565                     cpu_src2 = gen_load_gpr(dc, rs2);
4566                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4567                     gen_store_gpr(dc, rd, cpu_dst);
4568                     break;
4569                 case 0x004: /* VIS I edge16cc */
4570                     CHECK_FPU_FEATURE(dc, VIS1);
4571                     cpu_src1 = gen_load_gpr(dc, rs1);
4572                     cpu_src2 = gen_load_gpr(dc, rs2);
4573                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4574                     gen_store_gpr(dc, rd, cpu_dst);
4575                     break;
4576                 case 0x005: /* VIS II edge16n */
4577                     CHECK_FPU_FEATURE(dc, VIS2);
4578                     cpu_src1 = gen_load_gpr(dc, rs1);
4579                     cpu_src2 = gen_load_gpr(dc, rs2);
4580                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4581                     gen_store_gpr(dc, rd, cpu_dst);
4582                     break;
4583                 case 0x006: /* VIS I edge16lcc */
4584                     CHECK_FPU_FEATURE(dc, VIS1);
4585                     cpu_src1 = gen_load_gpr(dc, rs1);
4586                     cpu_src2 = gen_load_gpr(dc, rs2);
4587                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4588                     gen_store_gpr(dc, rd, cpu_dst);
4589                     break;
4590                 case 0x007: /* VIS II edge16ln */
4591                     CHECK_FPU_FEATURE(dc, VIS2);
4592                     cpu_src1 = gen_load_gpr(dc, rs1);
4593                     cpu_src2 = gen_load_gpr(dc, rs2);
4594                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4595                     gen_store_gpr(dc, rd, cpu_dst);
4596                     break;
4597                 case 0x008: /* VIS I edge32cc */
4598                     CHECK_FPU_FEATURE(dc, VIS1);
4599                     cpu_src1 = gen_load_gpr(dc, rs1);
4600                     cpu_src2 = gen_load_gpr(dc, rs2);
4601                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4602                     gen_store_gpr(dc, rd, cpu_dst);
4603                     break;
4604                 case 0x009: /* VIS II edge32n */
4605                     CHECK_FPU_FEATURE(dc, VIS2);
4606                     cpu_src1 = gen_load_gpr(dc, rs1);
4607                     cpu_src2 = gen_load_gpr(dc, rs2);
4608                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4609                     gen_store_gpr(dc, rd, cpu_dst);
4610                     break;
4611                 case 0x00a: /* VIS I edge32lcc */
4612                     CHECK_FPU_FEATURE(dc, VIS1);
4613                     cpu_src1 = gen_load_gpr(dc, rs1);
4614                     cpu_src2 = gen_load_gpr(dc, rs2);
4615                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4616                     gen_store_gpr(dc, rd, cpu_dst);
4617                     break;
4618                 case 0x00b: /* VIS II edge32ln */
4619                     CHECK_FPU_FEATURE(dc, VIS2);
4620                     cpu_src1 = gen_load_gpr(dc, rs1);
4621                     cpu_src2 = gen_load_gpr(dc, rs2);
4622                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4623                     gen_store_gpr(dc, rd, cpu_dst);
4624                     break;
4625                 case 0x010: /* VIS I array8 */
4626                     CHECK_FPU_FEATURE(dc, VIS1);
4627                     cpu_src1 = gen_load_gpr(dc, rs1);
4628                     cpu_src2 = gen_load_gpr(dc, rs2);
4629                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4630                     gen_store_gpr(dc, rd, cpu_dst);
4631                     break;
4632                 case 0x012: /* VIS I array16 */
4633                     CHECK_FPU_FEATURE(dc, VIS1);
4634                     cpu_src1 = gen_load_gpr(dc, rs1);
4635                     cpu_src2 = gen_load_gpr(dc, rs2);
4636                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4637                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4638                     gen_store_gpr(dc, rd, cpu_dst);
4639                     break;
4640                 case 0x014: /* VIS I array32 */
4641                     CHECK_FPU_FEATURE(dc, VIS1);
4642                     cpu_src1 = gen_load_gpr(dc, rs1);
4643                     cpu_src2 = gen_load_gpr(dc, rs2);
4644                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4645                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4646                     gen_store_gpr(dc, rd, cpu_dst);
4647                     break;
4648                 case 0x018: /* VIS I alignaddr */
4649                     CHECK_FPU_FEATURE(dc, VIS1);
4650                     cpu_src1 = gen_load_gpr(dc, rs1);
4651                     cpu_src2 = gen_load_gpr(dc, rs2);
4652                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4653                     gen_store_gpr(dc, rd, cpu_dst);
4654                     break;
4655                 case 0x01a: /* VIS I alignaddrl */
4656                     CHECK_FPU_FEATURE(dc, VIS1);
4657                     cpu_src1 = gen_load_gpr(dc, rs1);
4658                     cpu_src2 = gen_load_gpr(dc, rs2);
4659                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4660                     gen_store_gpr(dc, rd, cpu_dst);
4661                     break;
4662                 case 0x019: /* VIS II bmask */
4663                     CHECK_FPU_FEATURE(dc, VIS2);
4664                     cpu_src1 = gen_load_gpr(dc, rs1);
4665                     cpu_src2 = gen_load_gpr(dc, rs2);
4666                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4667                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4668                     gen_store_gpr(dc, rd, cpu_dst);
4669                     break;
4670                 case 0x020: /* VIS I fcmple16 */
4671                     CHECK_FPU_FEATURE(dc, VIS1);
4672                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4673                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4674                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4675                     gen_store_gpr(dc, rd, cpu_dst);
4676                     break;
4677                 case 0x022: /* VIS I fcmpne16 */
4678                     CHECK_FPU_FEATURE(dc, VIS1);
4679                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4680                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4681                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4682                     gen_store_gpr(dc, rd, cpu_dst);
4683                     break;
4684                 case 0x024: /* VIS I fcmple32 */
4685                     CHECK_FPU_FEATURE(dc, VIS1);
4686                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4687                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4688                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4689                     gen_store_gpr(dc, rd, cpu_dst);
4690                     break;
4691                 case 0x026: /* VIS I fcmpne32 */
4692                     CHECK_FPU_FEATURE(dc, VIS1);
4693                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4694                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4695                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4696                     gen_store_gpr(dc, rd, cpu_dst);
4697                     break;
4698                 case 0x028: /* VIS I fcmpgt16 */
4699                     CHECK_FPU_FEATURE(dc, VIS1);
4700                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4701                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4702                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4703                     gen_store_gpr(dc, rd, cpu_dst);
4704                     break;
4705                 case 0x02a: /* VIS I fcmpeq16 */
4706                     CHECK_FPU_FEATURE(dc, VIS1);
4707                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4708                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4709                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4710                     gen_store_gpr(dc, rd, cpu_dst);
4711                     break;
4712                 case 0x02c: /* VIS I fcmpgt32 */
4713                     CHECK_FPU_FEATURE(dc, VIS1);
4714                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4715                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4716                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4717                     gen_store_gpr(dc, rd, cpu_dst);
4718                     break;
4719                 case 0x02e: /* VIS I fcmpeq32 */
4720                     CHECK_FPU_FEATURE(dc, VIS1);
4721                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4722                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4723                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4724                     gen_store_gpr(dc, rd, cpu_dst);
4725                     break;
4726                 case 0x031: /* VIS I fmul8x16 */
4727                     CHECK_FPU_FEATURE(dc, VIS1);
4728                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4729                     break;
4730                 case 0x033: /* VIS I fmul8x16au */
4731                     CHECK_FPU_FEATURE(dc, VIS1);
4732                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4733                     break;
4734                 case 0x035: /* VIS I fmul8x16al */
4735                     CHECK_FPU_FEATURE(dc, VIS1);
4736                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4737                     break;
4738                 case 0x036: /* VIS I fmul8sux16 */
4739                     CHECK_FPU_FEATURE(dc, VIS1);
4740                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4741                     break;
4742                 case 0x037: /* VIS I fmul8ulx16 */
4743                     CHECK_FPU_FEATURE(dc, VIS1);
4744                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4745                     break;
4746                 case 0x038: /* VIS I fmuld8sux16 */
4747                     CHECK_FPU_FEATURE(dc, VIS1);
4748                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4749                     break;
4750                 case 0x039: /* VIS I fmuld8ulx16 */
4751                     CHECK_FPU_FEATURE(dc, VIS1);
4752                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4753                     break;
4754                 case 0x03a: /* VIS I fpack32 */
4755                     CHECK_FPU_FEATURE(dc, VIS1);
4756                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4757                     break;
4758                 case 0x03b: /* VIS I fpack16 */
4759                     CHECK_FPU_FEATURE(dc, VIS1);
4760                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4761                     cpu_dst_32 = gen_dest_fpr_F(dc);
4762                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4763                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4764                     break;
4765                 case 0x03d: /* VIS I fpackfix */
4766                     CHECK_FPU_FEATURE(dc, VIS1);
4767                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4768                     cpu_dst_32 = gen_dest_fpr_F(dc);
4769                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4770                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4771                     break;
4772                 case 0x03e: /* VIS I pdist */
4773                     CHECK_FPU_FEATURE(dc, VIS1);
4774                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4775                     break;
4776                 case 0x048: /* VIS I faligndata */
4777                     CHECK_FPU_FEATURE(dc, VIS1);
4778                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4779                     break;
4780                 case 0x04b: /* VIS I fpmerge */
4781                     CHECK_FPU_FEATURE(dc, VIS1);
4782                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4783                     break;
4784                 case 0x04c: /* VIS II bshuffle */
4785                     CHECK_FPU_FEATURE(dc, VIS2);
4786                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4787                     break;
4788                 case 0x04d: /* VIS I fexpand */
4789                     CHECK_FPU_FEATURE(dc, VIS1);
4790                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4791                     break;
4792                 case 0x050: /* VIS I fpadd16 */
4793                     CHECK_FPU_FEATURE(dc, VIS1);
4794                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4795                     break;
4796                 case 0x051: /* VIS I fpadd16s */
4797                     CHECK_FPU_FEATURE(dc, VIS1);
4798                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4799                     break;
4800                 case 0x052: /* VIS I fpadd32 */
4801                     CHECK_FPU_FEATURE(dc, VIS1);
4802                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4803                     break;
4804                 case 0x053: /* VIS I fpadd32s */
4805                     CHECK_FPU_FEATURE(dc, VIS1);
4806                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4807                     break;
4808                 case 0x054: /* VIS I fpsub16 */
4809                     CHECK_FPU_FEATURE(dc, VIS1);
4810                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4811                     break;
4812                 case 0x055: /* VIS I fpsub16s */
4813                     CHECK_FPU_FEATURE(dc, VIS1);
4814                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4815                     break;
4816                 case 0x056: /* VIS I fpsub32 */
4817                     CHECK_FPU_FEATURE(dc, VIS1);
4818                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4819                     break;
4820                 case 0x057: /* VIS I fpsub32s */
4821                     CHECK_FPU_FEATURE(dc, VIS1);
4822                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4823                     break;
4824                 case 0x060: /* VIS I fzero */
4825                     CHECK_FPU_FEATURE(dc, VIS1);
4826                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4827                     tcg_gen_movi_i64(cpu_dst_64, 0);
4828                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4829                     break;
4830                 case 0x061: /* VIS I fzeros */
4831                     CHECK_FPU_FEATURE(dc, VIS1);
4832                     cpu_dst_32 = gen_dest_fpr_F(dc);
4833                     tcg_gen_movi_i32(cpu_dst_32, 0);
4834                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4835                     break;
4836                 case 0x062: /* VIS I fnor */
4837                     CHECK_FPU_FEATURE(dc, VIS1);
4838                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4839                     break;
4840                 case 0x063: /* VIS I fnors */
4841                     CHECK_FPU_FEATURE(dc, VIS1);
4842                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4843                     break;
4844                 case 0x064: /* VIS I fandnot2 */
4845                     CHECK_FPU_FEATURE(dc, VIS1);
4846                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4847                     break;
4848                 case 0x065: /* VIS I fandnot2s */
4849                     CHECK_FPU_FEATURE(dc, VIS1);
4850                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4851                     break;
4852                 case 0x066: /* VIS I fnot2 */
4853                     CHECK_FPU_FEATURE(dc, VIS1);
4854                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4855                     break;
4856                 case 0x067: /* VIS I fnot2s */
4857                     CHECK_FPU_FEATURE(dc, VIS1);
4858                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4859                     break;
4860                 case 0x068: /* VIS I fandnot1 */
4861                     CHECK_FPU_FEATURE(dc, VIS1);
4862                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4863                     break;
4864                 case 0x069: /* VIS I fandnot1s */
4865                     CHECK_FPU_FEATURE(dc, VIS1);
4866                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4867                     break;
4868                 case 0x06a: /* VIS I fnot1 */
4869                     CHECK_FPU_FEATURE(dc, VIS1);
4870                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4871                     break;
4872                 case 0x06b: /* VIS I fnot1s */
4873                     CHECK_FPU_FEATURE(dc, VIS1);
4874                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4875                     break;
4876                 case 0x06c: /* VIS I fxor */
4877                     CHECK_FPU_FEATURE(dc, VIS1);
4878                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4879                     break;
4880                 case 0x06d: /* VIS I fxors */
4881                     CHECK_FPU_FEATURE(dc, VIS1);
4882                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4883                     break;
4884                 case 0x06e: /* VIS I fnand */
4885                     CHECK_FPU_FEATURE(dc, VIS1);
4886                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4887                     break;
4888                 case 0x06f: /* VIS I fnands */
4889                     CHECK_FPU_FEATURE(dc, VIS1);
4890                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4891                     break;
4892                 case 0x070: /* VIS I fand */
4893                     CHECK_FPU_FEATURE(dc, VIS1);
4894                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4895                     break;
4896                 case 0x071: /* VIS I fands */
4897                     CHECK_FPU_FEATURE(dc, VIS1);
4898                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4899                     break;
4900                 case 0x072: /* VIS I fxnor */
4901                     CHECK_FPU_FEATURE(dc, VIS1);
4902                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4903                     break;
4904                 case 0x073: /* VIS I fxnors */
4905                     CHECK_FPU_FEATURE(dc, VIS1);
4906                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4907                     break;
4908                 case 0x074: /* VIS I fsrc1 */
4909                     CHECK_FPU_FEATURE(dc, VIS1);
4910                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4911                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4912                     break;
4913                 case 0x075: /* VIS I fsrc1s */
4914                     CHECK_FPU_FEATURE(dc, VIS1);
4915                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4916                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4917                     break;
4918                 case 0x076: /* VIS I fornot2 */
4919                     CHECK_FPU_FEATURE(dc, VIS1);
4920                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4921                     break;
4922                 case 0x077: /* VIS I fornot2s */
4923                     CHECK_FPU_FEATURE(dc, VIS1);
4924                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4925                     break;
4926                 case 0x078: /* VIS I fsrc2 */
4927                     CHECK_FPU_FEATURE(dc, VIS1);
4928                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4929                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4930                     break;
4931                 case 0x079: /* VIS I fsrc2s */
4932                     CHECK_FPU_FEATURE(dc, VIS1);
4933                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4934                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4935                     break;
4936                 case 0x07a: /* VIS I fornot1 */
4937                     CHECK_FPU_FEATURE(dc, VIS1);
4938                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4939                     break;
4940                 case 0x07b: /* VIS I fornot1s */
4941                     CHECK_FPU_FEATURE(dc, VIS1);
4942                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4943                     break;
4944                 case 0x07c: /* VIS I for */
4945                     CHECK_FPU_FEATURE(dc, VIS1);
4946                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4947                     break;
4948                 case 0x07d: /* VIS I fors */
4949                     CHECK_FPU_FEATURE(dc, VIS1);
4950                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4951                     break;
4952                 case 0x07e: /* VIS I fone */
4953                     CHECK_FPU_FEATURE(dc, VIS1);
4954                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4955                     tcg_gen_movi_i64(cpu_dst_64, -1);
4956                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4957                     break;
4958                 case 0x07f: /* VIS I fones */
4959                     CHECK_FPU_FEATURE(dc, VIS1);
4960                     cpu_dst_32 = gen_dest_fpr_F(dc);
4961                     tcg_gen_movi_i32(cpu_dst_32, -1);
4962                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4963                     break;
4964                 case 0x080: /* VIS I shutdown */
4965                 case 0x081: /* VIS II siam */
4966                     // XXX
4967                     goto illegal_insn;
4968                 default:
4969                     goto illegal_insn;
4970                 }
4971 #else
4972                 goto ncp_insn;
4973 #endif
4974             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4975 #ifdef TARGET_SPARC64
4976                 goto illegal_insn;
4977 #else
4978                 goto ncp_insn;
4979 #endif
4980 #ifdef TARGET_SPARC64
4981             } else if (xop == 0x39) { /* V9 return */
4982                 save_state(dc);
4983                 cpu_src1 = get_src1(dc, insn);
4984                 cpu_tmp0 = tcg_temp_new();
4985                 if (IS_IMM) {   /* immediate */
4986                     simm = GET_FIELDs(insn, 19, 31);
4987                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4988                 } else {                /* register */
4989                     rs2 = GET_FIELD(insn, 27, 31);
4990                     if (rs2) {
4991                         cpu_src2 = gen_load_gpr(dc, rs2);
4992                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4993                     } else {
4994                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4995                     }
4996                 }
4997                 gen_check_align(dc, cpu_tmp0, 3);
4998                 gen_helper_restore(tcg_env);
4999                 gen_mov_pc_npc(dc);
5000                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5001                 dc->npc = DYNAMIC_PC_LOOKUP;
5002                 goto jmp_insn;
5003 #endif
5004             } else {
5005                 cpu_src1 = get_src1(dc, insn);
5006                 cpu_tmp0 = tcg_temp_new();
5007                 if (IS_IMM) {   /* immediate */
5008                     simm = GET_FIELDs(insn, 19, 31);
5009                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5010                 } else {                /* register */
5011                     rs2 = GET_FIELD(insn, 27, 31);
5012                     if (rs2) {
5013                         cpu_src2 = gen_load_gpr(dc, rs2);
5014                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5015                     } else {
5016                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5017                     }
5018                 }
5019                 switch (xop) {
5020                 case 0x38:      /* jmpl */
5021                     {
5022                         gen_check_align(dc, cpu_tmp0, 3);
5023                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5024                         gen_mov_pc_npc(dc);
5025                         gen_address_mask(dc, cpu_tmp0);
5026                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5027                         dc->npc = DYNAMIC_PC_LOOKUP;
5028                     }
5029                     goto jmp_insn;
5030 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5031                 case 0x39:      /* rett, V9 return */
5032                     {
5033                         if (!supervisor(dc))
5034                             goto priv_insn;
5035                         gen_check_align(dc, cpu_tmp0, 3);
5036                         gen_mov_pc_npc(dc);
5037                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5038                         dc->npc = DYNAMIC_PC;
5039                         gen_helper_rett(tcg_env);
5040                     }
5041                     goto jmp_insn;
5042 #endif
5043                 case 0x3b: /* flush */
5044                     /* nop */
5045                     break;
5046                 case 0x3c:      /* save */
5047                     gen_helper_save(tcg_env);
5048                     gen_store_gpr(dc, rd, cpu_tmp0);
5049                     break;
5050                 case 0x3d:      /* restore */
5051                     gen_helper_restore(tcg_env);
5052                     gen_store_gpr(dc, rd, cpu_tmp0);
5053                     break;
5054 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5055                 case 0x3e:      /* V9 done/retry */
5056                     {
5057                         switch (rd) {
5058                         case 0:
5059                             if (!supervisor(dc))
5060                                 goto priv_insn;
5061                             dc->npc = DYNAMIC_PC;
5062                             dc->pc = DYNAMIC_PC;
5063                             translator_io_start(&dc->base);
5064                             gen_helper_done(tcg_env);
5065                             goto jmp_insn;
5066                         case 1:
5067                             if (!supervisor(dc))
5068                                 goto priv_insn;
5069                             dc->npc = DYNAMIC_PC;
5070                             dc->pc = DYNAMIC_PC;
5071                             translator_io_start(&dc->base);
5072                             gen_helper_retry(tcg_env);
5073                             goto jmp_insn;
5074                         default:
5075                             goto illegal_insn;
5076                         }
5077                     }
5078                     break;
5079 #endif
5080                 default:
5081                     goto illegal_insn;
5082                 }
5083             }
5084             break;
5085         }
5086         break;
5087     case 3:                     /* load/store instructions */
5088         {
5089             unsigned int xop = GET_FIELD(insn, 7, 12);
5090             /* ??? gen_address_mask prevents us from using a source
5091                register directly.  Always generate a temporary.  */
5092             TCGv cpu_addr = tcg_temp_new();
5093 
5094             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5095             if (xop == 0x3c || xop == 0x3e) {
5096                 /* V9 casa/casxa : no offset */
5097             } else if (IS_IMM) {     /* immediate */
5098                 simm = GET_FIELDs(insn, 19, 31);
5099                 if (simm != 0) {
5100                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5101                 }
5102             } else {            /* register */
5103                 rs2 = GET_FIELD(insn, 27, 31);
5104                 if (rs2 != 0) {
5105                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5106                 }
5107             }
5108             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5109                 (xop > 0x17 && xop <= 0x1d ) ||
5110                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5111                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5112 
5113                 switch (xop) {
5114                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5115                     gen_address_mask(dc, cpu_addr);
5116                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5117                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5118                     break;
5119                 case 0x1:       /* ldub, load unsigned byte */
5120                     gen_address_mask(dc, cpu_addr);
5121                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5122                                        dc->mem_idx, MO_UB);
5123                     break;
5124                 case 0x2:       /* lduh, load unsigned halfword */
5125                     gen_address_mask(dc, cpu_addr);
5126                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5127                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5128                     break;
5129                 case 0x3:       /* ldd, load double word */
5130                     if (rd & 1)
5131                         goto illegal_insn;
5132                     else {
5133                         TCGv_i64 t64;
5134 
5135                         gen_address_mask(dc, cpu_addr);
5136                         t64 = tcg_temp_new_i64();
5137                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5138                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5139                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5140                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5141                         gen_store_gpr(dc, rd + 1, cpu_val);
5142                         tcg_gen_shri_i64(t64, t64, 32);
5143                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5144                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5145                     }
5146                     break;
5147                 case 0x9:       /* ldsb, load signed byte */
5148                     gen_address_mask(dc, cpu_addr);
5149                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5150                     break;
5151                 case 0xa:       /* ldsh, load signed halfword */
5152                     gen_address_mask(dc, cpu_addr);
5153                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5154                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5155                     break;
5156                 case 0xd:       /* ldstub */
5157                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5158                     break;
5159                 case 0x0f:
5160                     /* swap, swap register with memory. Also atomically */
5161                     cpu_src1 = gen_load_gpr(dc, rd);
5162                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5163                              dc->mem_idx, MO_TEUL);
5164                     break;
5165 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5166                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5167                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5168                     break;
5169                 case 0x11:      /* lduba, load unsigned byte alternate */
5170                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5171                     break;
5172                 case 0x12:      /* lduha, load unsigned halfword alternate */
5173                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5174                     break;
5175                 case 0x13:      /* ldda, load double word alternate */
5176                     if (rd & 1) {
5177                         goto illegal_insn;
5178                     }
5179                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5180                     goto skip_move;
5181                 case 0x19:      /* ldsba, load signed byte alternate */
5182                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5183                     break;
5184                 case 0x1a:      /* ldsha, load signed halfword alternate */
5185                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5186                     break;
5187                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5188                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5189                     break;
5190                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5191                                    atomically */
5192                     cpu_src1 = gen_load_gpr(dc, rd);
5193                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5194                     break;
5195 
5196 #ifndef TARGET_SPARC64
5197                 case 0x30: /* ldc */
5198                 case 0x31: /* ldcsr */
5199                 case 0x33: /* lddc */
5200                     goto ncp_insn;
5201 #endif
5202 #endif
5203 #ifdef TARGET_SPARC64
5204                 case 0x08: /* V9 ldsw */
5205                     gen_address_mask(dc, cpu_addr);
5206                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5207                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5208                     break;
5209                 case 0x0b: /* V9 ldx */
5210                     gen_address_mask(dc, cpu_addr);
5211                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5212                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5213                     break;
5214                 case 0x18: /* V9 ldswa */
5215                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5216                     break;
5217                 case 0x1b: /* V9 ldxa */
5218                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5219                     break;
5220                 case 0x2d: /* V9 prefetch, no effect */
5221                     goto skip_move;
5222                 case 0x30: /* V9 ldfa */
5223                     if (gen_trap_ifnofpu(dc)) {
5224                         goto jmp_insn;
5225                     }
5226                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5227                     gen_update_fprs_dirty(dc, rd);
5228                     goto skip_move;
5229                 case 0x33: /* V9 lddfa */
5230                     if (gen_trap_ifnofpu(dc)) {
5231                         goto jmp_insn;
5232                     }
5233                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5234                     gen_update_fprs_dirty(dc, DFPREG(rd));
5235                     goto skip_move;
5236                 case 0x3d: /* V9 prefetcha, no effect */
5237                     goto skip_move;
5238                 case 0x32: /* V9 ldqfa */
5239                     CHECK_FPU_FEATURE(dc, FLOAT128);
5240                     if (gen_trap_ifnofpu(dc)) {
5241                         goto jmp_insn;
5242                     }
5243                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5244                     gen_update_fprs_dirty(dc, QFPREG(rd));
5245                     goto skip_move;
5246 #endif
5247                 default:
5248                     goto illegal_insn;
5249                 }
5250                 gen_store_gpr(dc, rd, cpu_val);
5251 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5252             skip_move: ;
5253 #endif
5254             } else if (xop >= 0x20 && xop < 0x24) {
5255                 if (gen_trap_ifnofpu(dc)) {
5256                     goto jmp_insn;
5257                 }
5258                 switch (xop) {
5259                 case 0x20:      /* ldf, load fpreg */
5260                     gen_address_mask(dc, cpu_addr);
5261                     cpu_dst_32 = gen_dest_fpr_F(dc);
5262                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5263                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5264                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5265                     break;
5266                 case 0x21:      /* ldfsr, V9 ldxfsr */
5267 #ifdef TARGET_SPARC64
5268                     gen_address_mask(dc, cpu_addr);
5269                     if (rd == 1) {
5270                         TCGv_i64 t64 = tcg_temp_new_i64();
5271                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5272                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5273                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5274                         break;
5275                     }
5276 #endif
5277                     cpu_dst_32 = tcg_temp_new_i32();
5278                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5279                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5280                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5281                     break;
5282                 case 0x22:      /* ldqf, load quad fpreg */
5283                     CHECK_FPU_FEATURE(dc, FLOAT128);
5284                     gen_address_mask(dc, cpu_addr);
5285                     cpu_src1_64 = tcg_temp_new_i64();
5286                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5287                                         MO_TEUQ | MO_ALIGN_4);
5288                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5289                     cpu_src2_64 = tcg_temp_new_i64();
5290                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5291                                         MO_TEUQ | MO_ALIGN_4);
5292                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5293                     break;
5294                 case 0x23:      /* lddf, load double fpreg */
5295                     gen_address_mask(dc, cpu_addr);
5296                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5297                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5298                                         MO_TEUQ | MO_ALIGN_4);
5299                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5300                     break;
5301                 default:
5302                     goto illegal_insn;
5303                 }
5304             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5305                        xop == 0xe || xop == 0x1e) {
5306                 TCGv cpu_val = gen_load_gpr(dc, rd);
5307 
5308                 switch (xop) {
5309                 case 0x4: /* st, store word */
5310                     gen_address_mask(dc, cpu_addr);
5311                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5312                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5313                     break;
5314                 case 0x5: /* stb, store byte */
5315                     gen_address_mask(dc, cpu_addr);
5316                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5317                     break;
5318                 case 0x6: /* sth, store halfword */
5319                     gen_address_mask(dc, cpu_addr);
5320                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5321                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5322                     break;
5323                 case 0x7: /* std, store double word */
5324                     if (rd & 1)
5325                         goto illegal_insn;
5326                     else {
5327                         TCGv_i64 t64;
5328                         TCGv lo;
5329 
5330                         gen_address_mask(dc, cpu_addr);
5331                         lo = gen_load_gpr(dc, rd + 1);
5332                         t64 = tcg_temp_new_i64();
5333                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5334                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5335                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5336                     }
5337                     break;
5338 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5339                 case 0x14: /* sta, V9 stwa, store word alternate */
5340                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5341                     break;
5342                 case 0x15: /* stba, store byte alternate */
5343                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5344                     break;
5345                 case 0x16: /* stha, store halfword alternate */
5346                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5347                     break;
5348                 case 0x17: /* stda, store double word alternate */
5349                     if (rd & 1) {
5350                         goto illegal_insn;
5351                     }
5352                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5353                     break;
5354 #endif
5355 #ifdef TARGET_SPARC64
5356                 case 0x0e: /* V9 stx */
5357                     gen_address_mask(dc, cpu_addr);
5358                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5359                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5360                     break;
5361                 case 0x1e: /* V9 stxa */
5362                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5363                     break;
5364 #endif
5365                 default:
5366                     goto illegal_insn;
5367                 }
5368             } else if (xop > 0x23 && xop < 0x28) {
5369                 if (gen_trap_ifnofpu(dc)) {
5370                     goto jmp_insn;
5371                 }
5372                 switch (xop) {
5373                 case 0x24: /* stf, store fpreg */
5374                     gen_address_mask(dc, cpu_addr);
5375                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5376                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5377                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5378                     break;
5379                 case 0x25: /* stfsr, V9 stxfsr */
5380                     {
5381 #ifdef TARGET_SPARC64
5382                         gen_address_mask(dc, cpu_addr);
5383                         if (rd == 1) {
5384                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5385                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5386                             break;
5387                         }
5388 #endif
5389                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5390                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5391                     }
5392                     break;
5393                 case 0x26:
5394 #ifdef TARGET_SPARC64
5395                     /* V9 stqf, store quad fpreg */
5396                     CHECK_FPU_FEATURE(dc, FLOAT128);
5397                     gen_address_mask(dc, cpu_addr);
5398                     /* ??? While stqf only requires 4-byte alignment, it is
5399                        legal for the cpu to signal the unaligned exception.
5400                        The OS trap handler is then required to fix it up.
5401                        For qemu, this avoids having to probe the second page
5402                        before performing the first write.  */
5403                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5404                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5405                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5406                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5407                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5408                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5409                                         dc->mem_idx, MO_TEUQ);
5410                     break;
5411 #else /* !TARGET_SPARC64 */
5412                     /* stdfq, store floating point queue */
5413 #if defined(CONFIG_USER_ONLY)
5414                     goto illegal_insn;
5415 #else
5416                     if (!supervisor(dc))
5417                         goto priv_insn;
5418                     if (gen_trap_ifnofpu(dc)) {
5419                         goto jmp_insn;
5420                     }
5421                     goto nfq_insn;
5422 #endif
5423 #endif
5424                 case 0x27: /* stdf, store double fpreg */
5425                     gen_address_mask(dc, cpu_addr);
5426                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5427                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5428                                         MO_TEUQ | MO_ALIGN_4);
5429                     break;
5430                 default:
5431                     goto illegal_insn;
5432                 }
5433             } else if (xop > 0x33 && xop < 0x3f) {
5434                 switch (xop) {
5435 #ifdef TARGET_SPARC64
5436                 case 0x34: /* V9 stfa */
5437                     if (gen_trap_ifnofpu(dc)) {
5438                         goto jmp_insn;
5439                     }
5440                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5441                     break;
5442                 case 0x36: /* V9 stqfa */
5443                     {
5444                         CHECK_FPU_FEATURE(dc, FLOAT128);
5445                         if (gen_trap_ifnofpu(dc)) {
5446                             goto jmp_insn;
5447                         }
5448                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5449                     }
5450                     break;
5451                 case 0x37: /* V9 stdfa */
5452                     if (gen_trap_ifnofpu(dc)) {
5453                         goto jmp_insn;
5454                     }
5455                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5456                     break;
5457                 case 0x3e: /* V9 casxa */
5458                     rs2 = GET_FIELD(insn, 27, 31);
5459                     cpu_src2 = gen_load_gpr(dc, rs2);
5460                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5461                     break;
5462 #else
5463                 case 0x34: /* stc */
5464                 case 0x35: /* stcsr */
5465                 case 0x36: /* stdcq */
5466                 case 0x37: /* stdc */
5467                     goto ncp_insn;
5468 #endif
5469 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5470                 case 0x3c: /* V9 or LEON3 casa */
5471 #ifndef TARGET_SPARC64
5472                     CHECK_IU_FEATURE(dc, CASA);
5473 #endif
5474                     rs2 = GET_FIELD(insn, 27, 31);
5475                     cpu_src2 = gen_load_gpr(dc, rs2);
5476                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5477                     break;
5478 #endif
5479                 default:
5480                     goto illegal_insn;
5481                 }
5482             } else {
5483                 goto illegal_insn;
5484             }
5485         }
5486         break;
5487     }
5488     advance_pc(dc);
5489  jmp_insn:
5490     return;
5491  illegal_insn:
5492     gen_exception(dc, TT_ILL_INSN);
5493     return;
5494 #if !defined(CONFIG_USER_ONLY)
5495  priv_insn:
5496     gen_exception(dc, TT_PRIV_INSN);
5497     return;
5498 #endif
5499  nfpu_insn:
5500     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5501     return;
5502 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5503  nfq_insn:
5504     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5505     return;
5506 #endif
5507 #ifndef TARGET_SPARC64
5508  ncp_insn:
5509     gen_exception(dc, TT_NCP_INSN);
5510     return;
5511 #endif
5512 }
5513 
5514 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5515 {
5516     DisasContext *dc = container_of(dcbase, DisasContext, base);
5517     CPUSPARCState *env = cpu_env(cs);
5518     int bound;
5519 
5520     dc->pc = dc->base.pc_first;
5521     dc->npc = (target_ulong)dc->base.tb->cs_base;
5522     dc->cc_op = CC_OP_DYNAMIC;
5523     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5524     dc->def = &env->def;
5525     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5526     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5527 #ifndef CONFIG_USER_ONLY
5528     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5529 #endif
5530 #ifdef TARGET_SPARC64
5531     dc->fprs_dirty = 0;
5532     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5533 #ifndef CONFIG_USER_ONLY
5534     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5535 #endif
5536 #endif
5537     /*
5538      * if we reach a page boundary, we stop generation so that the
5539      * PC of a TT_TFAULT exception is always in the right page
5540      */
5541     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5542     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5543 }
5544 
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* No per-TB setup is needed for SPARC; required translator hook. */
}
5548 
5549 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5550 {
5551     DisasContext *dc = container_of(dcbase, DisasContext, base);
5552     target_ulong npc = dc->npc;
5553 
5554     if (npc & 3) {
5555         switch (npc) {
5556         case JUMP_PC:
5557             assert(dc->jump_pc[1] == dc->pc + 4);
5558             npc = dc->jump_pc[0] | JUMP_PC;
5559             break;
5560         case DYNAMIC_PC:
5561         case DYNAMIC_PC_LOOKUP:
5562             npc = DYNAMIC_PC;
5563             break;
5564         default:
5565             g_assert_not_reached();
5566         }
5567     }
5568     tcg_gen_insn_start(dc->pc, npc);
5569 }
5570 
5571 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5572 {
5573     DisasContext *dc = container_of(dcbase, DisasContext, base);
5574     CPUSPARCState *env = cpu_env(cs);
5575     unsigned int insn;
5576 
5577     insn = translator_ldl(env, &dc->base, dc->pc);
5578     dc->base.pc_next += 4;
5579 
5580     if (!decode(dc, insn)) {
5581         disas_sparc_legacy(dc, insn);
5582     }
5583 
5584     if (dc->base.is_jmp == DISAS_NORETURN) {
5585         return;
5586     }
5587     if (dc->pc != dc->base.pc_next) {
5588         dc->base.is_jmp = DISAS_TOO_MANY;
5589     }
5590 }
5591 
/*
 * Translator hook: emit the code that ends the TB.  When both pc and
 * npc are static (low two bits clear) we chain directly to the next
 * TB; otherwise the symbolic markers decide whether a goto_ptr lookup
 * is still allowed or a full exit to the main loop is required.
 * Afterwards, emit the out-of-line raise paths for any exceptions
 * that were deferred onto dc->delay_excp_list during translation.
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of pc/npc holds a symbolic marker.  Only
         * DYNAMIC_PC forces an exit to the main loop; JUMP_PC and
         * DYNAMIC_PC_LOOKUP still permit a TB lookup (see the marker
         * definitions at the top of the file).
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* pc is static: materialize it into cpu_pc. */
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* Two-way branch: select between jump_pc[0] and pc + 4. */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /*
     * Emit the deferred exception paths.  Each entry carries its own
     * label, trap number, and the pc/npc at which it should be raised.
     */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        /*
         * A misaligned e->npc is a symbolic marker; cpu_npc is then
         * presumed to hold the live value already — only store it
         * when the npc is a real (4-aligned) address.
         */
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5671 
/* Translator hook: log the symbol and disassembly of the translated TB. */
static void sparc_tr_disas_log(const DisasContextBase *dcbase,
                               CPUState *cpu, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
}
5678 
/* Hooks consumed by the generic translator loop (translator_loop). */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5687 
/*
 * Top-level entry point for translating one TB: drive the generic
 * translator loop with the SPARC-specific hooks in sparc_tr_ops.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext dc = {};  /* zero-initialized per-TB translation state */

    translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
}
5695 
/*
 * Allocate the TCG globals that mirror the SPARC CPU state: condition
 * code inputs, pc/npc, the integer register file, the FP registers,
 * and assorted per-architecture control registers.
 */
void sparc_tcg_init(void)
{
    /* Names for the 32 windowed integer registers (%g, %o, %l, %i). */
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /*
     * FP registers are tracked as 64-bit pairs (cpu_fpr is i64), so
     * each name is the even half of a double-precision pair.
     */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit state; some entries exist only for one architecture. */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong sized state. */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    /* Pointer to the current register window inside env. */
    cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 reads as zero on SPARC, so it gets no backing TCG global. */
    cpu_regs[0] = NULL;
    /* %g1..%g7 live directly in env->gregs. */
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(tcg_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* %o/%l/%i live in the current window, addressed via regwptr. */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
5781 
5782 void sparc_restore_state_to_opc(CPUState *cs,
5783                                 const TranslationBlock *tb,
5784                                 const uint64_t *data)
5785 {
5786     SPARCCPU *cpu = SPARC_CPU(cs);
5787     CPUSPARCState *env = &cpu->env;
5788     target_ulong pc = data[0];
5789     target_ulong npc = data[1];
5790 
5791     env->pc = pc;
5792     if (npc == DYNAMIC_PC) {
5793         /* dynamic NPC: already stored */
5794     } else if (npc & JUMP_PC) {
5795         /* jump PC: use 'cond' and the jump targets of the translation */
5796         if (env->cond) {
5797             env->npc = npc & ~3;
5798         } else {
5799             env->npc = pc + 4;
5800         }
5801     } else {
5802         env->npc = npc;
5803     }
5804 }
5805