xref: /openbmc/qemu/target/sparc/translate.c (revision d5471936164e4ee3039f15d18308029040013a31)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
/* global register indexes: TCG globals mapping SPARC architectural state */
static TCGv_ptr cpu_regwptr;                    /* current register-window base pointer */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst; /* lazy condition-code operands/result */
static TCGv_i32 cpu_cc_op;                      /* which CC_OP_* produced cc_src/cc_dst */
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;                           /* last evaluated branch condition */
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers, stored as 64-bit pairs of 32-bit regs */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
/* One deferred exception, raised out-of-line at the end of the TB.
   Linked into DisasContext.delay_excp_list. */
typedef struct DisasDelayException {
    struct DisasDelayException *next;
    TCGLabel *lab;      /* branch target that raises the exception */
    TCGv_i32 excp;      /* exception number to raise */
    /* Saved state at parent insn. */
    target_ulong pc;
    target_ulong npc;
} DisasDelayException;
79 
/* Per-translation-block disassembly state. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;        /* MMU index for memory accesses */
    bool fpu_enabled;
    bool address_mask_32bit;  /* truncate addresses to 32 bits (see AM_CHECK) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;
#endif
    DisasDelayException *delay_excp_list;  /* deferred exceptions for this TB */
} DisasContext;
103 
/* A comparison lowered to a TCG condition: cond(c1, c2).
   is_bool is set when c1 already holds a 0/1 value. */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
109 
// Extract bits FROM..TO of X. This macro uses non-native bit order,
// i.e. bit 0 is the most significant bit (big-endian numbering).
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This macro uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

// Sign-extending variants of the two extractors above.
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Decode double/quad FP register numbers; on SPARC64 the low bit of the
   encoding selects the upper bank of 32 registers. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low LEN bits of X to a full signed 32-bit value.
 * The left shift is done on uint32_t: shifting a negative (or
 * overflowing) signed value left is undefined behavior in C, while the
 * unsigned shift followed by a signed arithmetic right shift is the
 * conventional well-defined idiom.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int32_t)((uint32_t)x << len) >> len;
}
137 
/* Instruction bit 13: set when the second operand is an immediate. */
#define IS_IMM (insn & (1<<13))
139 
/* Mark the FPRS dirty bit (DL for regs < 32, DU otherwise) for a write
   to FP register RD.  No-op on pre-V9 targets, which have no FPRS. */
static void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
152 
/* floating point registers moves */

/* Read single-precision FP register SRC into a fresh 32-bit temp.
   Odd-numbered registers live in the low half of the i64 pair,
   even-numbered ones in the high half. */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}
164 
/* Write 32-bit value V to single-precision FP register DST, preserving
   the other half of the containing i64 pair, and mark FPRS dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
174 
/* Allocate a temp to receive a single-precision result; the caller
   commits it with gen_store_fpr_F. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
179 
/* Return the i64 backing double-precision FP register SRC (read-only use). */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
185 
/* Write V to double-precision FP register DST and mark FPRS dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
192 
/* Return the i64 backing double register DST as a write destination.
   Writes land directly in the global; no separate store step needed,
   but the caller must still call gen_update_fprs_dirty. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
/* Spill the quad-register pair starting at SRC into env->qt0 for use by
   helpers that operate on CPU_QuadU. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
205 
/* As gen_op_load_fpr_QT0, but spills into env->qt1 (second operand). */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
213 
/* Reload a helper's quad result from env->qt0 back into the FP register
   pair starting at DST. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
221 
/* Write the two i64 halves V1 (upper) and V2 (lower) to quad FP
   register DST and mark FPRS dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
231 
232 #ifdef TARGET_SPARC64
/* Return the upper i64 half of quad FP register SRC (read-only use). */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}
238 
/* Return the lower i64 half of quad FP register SRC (read-only use). */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}
244 
/* Copy quad FP register RS to RD (both i64 halves) and mark FPRS dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
254 #endif
255 
/* moves */

/* Privilege predicates: in user emulation privileged state is never
   reachable, so these fold to constants. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* AM_CHECK: true when addresses must be truncated to 32 bits. */
#if !defined(TARGET_SPARC64)
# define AM_CHECK(dc)  false
#elif defined(TARGET_ABI32)
# define AM_CHECK(dc)  true
#elif defined(CONFIG_USER_ONLY)
# define AM_CHECK(dc)  false
#else
# define AM_CHECK(dc)  ((dc)->address_mask_32bit)
#endif
280 
/* Emit code to truncate ADDR to 32 bits in place, when required. */
static void gen_address_mask(DisasContext *dc, TCGv addr)
{
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
}
287 
288 static target_ulong address_mask_i(DisasContext *dc, target_ulong addr)
289 {
290     return AM_CHECK(dc) ? (uint32_t)addr : addr;
291 }
292 
/* Return a TCGv holding the value of general register REG.
   %g0 always reads as zero, so a fresh zeroed temp is returned for it. */
static TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        TCGv t = tcg_temp_new();
        tcg_gen_movi_tl(t, 0);
        return t;
    }
}
304 
/* Store V into general register REG; writes to %g0 are discarded. */
static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        assert(reg < 32);
        tcg_gen_mov_tl(cpu_regs[reg], v);
    }
}
312 
/* Return a destination for general register REG: the global itself for
   real registers, or a scratch temp for %g0 so the result is dropped. */
static TCGv gen_dest_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        return tcg_temp_new();
    }
}
322 
/* True when both PC and NPC may be reached with a direct TB link. */
static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
{
    return translator_use_goto_tb(&s->base, pc) &&
           translator_use_goto_tb(&s->base, npc);
}
328 
/* End the TB jumping to (PC, NPC): chained goto_tb when allowed,
   otherwise an indirect lookup-and-jump. */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
345 
// XXX suboptimal
/* Extract the N (negative) flag from a PSR-format i32 into REG as 0/1. */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
352 
/* Extract the Z (zero) flag from a PSR-format i32 into REG as 0/1. */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
358 
/* Extract the V (overflow) flag from a PSR-format i32 into REG as 0/1. */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
364 
/* Extract the C (carry) flag from a PSR-format i32 into REG as 0/1. */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
370 
/* dst = src1 + src2, recording operands and result in the lazy
   condition-code globals for later flag computation. */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
378 
/* Recover the 32-bit carry-out of a previously recorded addition
   (cc_dst = cc_src + cc_src2) as a 0/1 value: carry iff dst < src. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit globals. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
399 
/* Recover the 32-bit borrow-out of a previously recorded subtraction
   (cc_dst = cc_src - cc_src2) as a 0/1 value: borrow iff src1 < src2. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit globals. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
420 
/* ADDX/ADDXcc: dst = src1 + src2 + icc.C.  The carry-in is derived from
   the lazily tracked condition-code state (dc->cc_op) without forcing a
   full flag computation whenever possible.  When UPDATE_CC is set, the
   result is recorded for CC_OP_ADDX flag evaluation. */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
485 
/* dst = src1 - src2, recording operands and result in the lazy
   condition-code globals for later flag computation. */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
493 
/* SUBX/SUBXcc: dst = src1 - src2 - icc.C.  Mirror of gen_op_addx_int:
   the borrow-in is derived from the lazily tracked condition-code state
   without forcing a full flag computation whenever possible.  When
   UPDATE_CC is set, the result is recorded for CC_OP_SUBX evaluation. */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
558 
/* MULScc: one step of the V8 multiply-step instruction.  Performs a
   single shift-and-conditionally-add iteration, rotating a bit of src1
   through the Y register and updating the cc globals. */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
597 
/* 32x32 -> 64-bit multiply of the low halves of src1/src2; the low 32
   bits of the product go to dst, the high 32 bits to the Y register.
   SIGN_EXT selects signed vs unsigned operand extension. */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    /* Full 64-bit product in dst; Y receives the upper 32 bits. */
    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
622 
/* UMUL: unsigned 32x32 multiply (see gen_op_multiply). */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
628 
/* SMUL: signed 32x32 multiply (see gen_op_multiply). */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
634 
// ba (branch always): condition is constant 1
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
640 
// be (branch equal): Z
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
646 
// ble (branch less or equal): Z | (N ^ V)
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}
657 
// bl (branch less): N ^ V
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}
666 
// bleu (branch less or equal unsigned): C | Z
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}
675 
// bcs (branch on carry set): C
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
681 
// bvs (branch on overflow set): V
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
687 
// bn (branch never): condition is constant 0
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
693 
// bneg (branch negative): N
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
699 
// bne (branch not equal): !Z
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
706 
// bg (branch greater): !(Z | (N ^ V)), computed by inverting ble
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
713 
// bge (branch greater or equal): !(N ^ V), computed by inverting bl
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
720 
// bgu (branch greater unsigned): !(C | Z), computed by inverting bleu
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
727 
// bcc (branch on carry clear): !C
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
734 
// bpos (branch positive): !N
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
741 
// bvc (branch on overflow clear): !V
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
748 
/*
  FPSR bit field FCC1 | FCC0 encodes the FP compare result:
   0 =
   1 <
   2 >
   3 unordered
  FCC_OFFSET selects which of the (up to four) fcc fields to read.
*/
/* Extract FCC0 of the selected fcc field into REG as 0/1. */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
762 
/* Extract FCC1 of the selected fcc field into REG as 0/1. */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
768 
// fbne: fcc != 0 -> FCC0 | FCC1
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}
777 
// fblg: fcc == 1 or 2 -> FCC0 ^ FCC1
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}
786 
// fbul: fcc == 1 or 3 -> FCC0
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
792 
// fbl: fcc == 1 -> FCC0 & !FCC1
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}
801 
// fbug: fcc == 2 or 3 -> FCC1
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
807 
// fbg: fcc == 2 -> !FCC0 & FCC1
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}
816 
// fbu: fcc == 3 (unordered) -> FCC0 & FCC1
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}
825 
// fbe: fcc == 0 -> !(FCC0 | FCC1)
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
835 
// fbue: fcc == 0 or 3 -> !(FCC0 ^ FCC1)
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
845 
// fbge: fcc == 0 or 2 -> !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
852 
// fbuge: fcc != 1 -> !(FCC0 & !FCC1)
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
862 
// fble: fcc == 0 or 1 -> !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
869 
// fbule: fcc != 2 -> !(!FCC0 & FCC1)
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
879 
// fbo: fcc != 3 (ordered) -> !(FCC0 & FCC1)
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
889 
/* End the TB with a two-way split on R_COND: taken -> PC1, fall
   through -> PC2; both successors are chained with goto_tb. */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
902 
/* Conditional branch with the annul bit set: if taken, execute the
   delay slot (npc) then jump to PC1; if not taken, the delay slot is
   annulled and execution resumes at npc + 4. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
917 
/* Conditional branch without the annul bit: the delay slot always runs.
   With a static npc we defer the decision via JUMP_PC/jump_pc[]; with a
   dynamic npc we must select the next npc with a movcond now. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            /* npc = taken ? pc1 : npc + 4 */
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;        /* taken target */
        dc->jump_pc[1] = npc + 4;    /* fall-through target */
        dc->npc = JUMP_PC;
    }
}
943 
/* Materialize a pending JUMP_PC: set cpu_npc to jump_pc[0] if the
   saved condition is true, else jump_pc[1]. */
static void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
952 
/* call this function before using the condition register as it may
   have been set for a jump */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
962 
/* Ensure cpu_npc holds the architectural next-PC.  A pending JUMP_PC is
   resolved; DYNAMIC values mean cpu_npc is already up to date; a static
   npc is written out as a constant. */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
981 
/* Force the lazily-evaluated condition codes into the PSR if they are
   not already there (dc->cc_op != CC_OP_FLAGS). */
static void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(tcg_env);
    }
}
989 
/* Synchronize cpu_pc/cpu_npc with the translation-time pc/npc, e.g.
   before raising an exception. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
995 
/* Raise exception WHICH at the current insn and terminate the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
1002 
1003 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
1004 {
1005     DisasDelayException *e = g_new0(DisasDelayException, 1);
1006 
1007     e->next = dc->delay_excp_list;
1008     dc->delay_excp_list = e;
1009 
1010     e->lab = gen_new_label();
1011     e->excp = excp;
1012     e->pc = dc->pc;
1013     /* Caller must have used flush_cond before branch. */
1014     assert(e->npc != JUMP_PC);
1015     e->npc = dc->npc;
1016 
1017     return e->lab;
1018 }
1019 
/* Convenience wrapper: deferred exception with a constant number. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
1024 
/* Emit a runtime alignment check: branch to a deferred TT_UNALIGNED
   exception when any of the MASK bits of ADDR are set. */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}
1036 
/* Advance pc to npc (the delay-slot step), handling the dynamic npc
   encodings; after this dc->pc reflects the new state. */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            /* Resolve the pending two-way npc first. */
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
    }
}
1058 
/* Sequential advance at runtime: pc = npc; npc += 4. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1064 
/*
 * Translate the 4-bit SPARC integer condition field COND into a
 * DisasCompare, using %xcc when XCC is true (sparc64) else %icc.
 * Depending on how the flags were last produced (dc->cc_op), the test
 * can often be expressed directly on the saved operands instead of
 * materializing the PSR.
 */
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    /* Conditions after SUBcc: compare the saved operands directly.
       -1 entries need N or V and fall back to materialized flags. */
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    /* Conditions after a logic op: C and V are known zero, so every
       test reduces to a sign/zero test of cc_dst (see notes). */
    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        /* Compare cc_dst against zero with the condition chosen above. */
        cmp->is_bool = false;
        cmp->c2 = tcg_constant_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            /* For %icc, test only the low 32 bits (sign-extended). */
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            /* N of a subtract result is just the sign of cc_dst. */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            /* V cannot be recovered from the operands alone. */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        /* No special case applies: materialize the full PSR flags. */
        gen_helper_compute_psr(tcg_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_constant_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
1232 
/*
 * Translate FP condition COND on condition-code field CC (fcc0..fcc3)
 * into a DisasCompare.  OFFSET selects the fcc field within FSR and is
 * passed to the gen_op_eval_fb* helpers; the result is materialized as
 * a boolean in a fresh temporary.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1311 
/* Map the 3-bit register-condition field to the INVERTED TCG condition;
   gen_compare_reg() applies tcg_invert_cond() to recover the real one.
   Entries 0 and 4 are reserved encodings. */
static const TCGCond gen_tcg_cond_reg[8] = {
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1323 
1324 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1325 {
1326     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1327     cmp->is_bool = false;
1328     cmp->c1 = r_src;
1329     cmp->c2 = tcg_constant_tl(0);
1330 }
1331 
1332 #ifdef TARGET_SPARC64
/* FP compare (single): result goes to the FSR fcc field chosen by fccno. */
static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}

/* FP compare (double): result goes to the FSR fcc field chosen by fccno. */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}

/* FP compare (quad): operands are already in the QT0/QT1 global temps. */
static void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, tcg_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
        break;
    }
}

/* FCMPEs (single): "e" variant — helper differs from plain fcmps. */
static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPEd (double). */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}

/* FCMPEq (quad), operands in QT0/QT1. */
static void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, tcg_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
        break;
    }
}
1440 
1441 #else
1442 
/* Pre-v9 variants: only fcc0 exists, so fccno is ignored. */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

/* Quad compare; operands are in the QT0/QT1 global temps. */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}

static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1472 #endif
1473 
/* Replace the FSR ftt field with FSR_FLAGS and raise TT_FP_EXCP. */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);  /* clear old ftt */
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1480 
/*
 * If the FPU is disabled, raise TT_NFPU_INSN and return 1 so the caller
 * can abandon translation of the insn.  In user-only builds the FPU is
 * always available and this returns 0.
 */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1491 
/* Clear the ftt and current IEEE exception (cexc) bits in FSR. */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1496 
1497 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1498                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1499 {
1500     TCGv_i32 dst, src;
1501 
1502     src = gen_load_fpr_F(dc, rs);
1503     dst = gen_dest_fpr_F(dc);
1504 
1505     gen(dst, tcg_env, src);
1506     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1507 
1508     gen_store_fpr_F(dc, rd, dst);
1509 }
1510 
1511 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1512                           void (*gen)(TCGv_i32, TCGv_i32))
1513 {
1514     TCGv_i32 dst, src;
1515 
1516     src = gen_load_fpr_F(dc, rs);
1517     dst = gen_dest_fpr_F(dc);
1518 
1519     gen(dst, src);
1520 
1521     gen_store_fpr_F(dc, rd, dst);
1522 }
1523 
1524 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1525                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1526 {
1527     TCGv_i32 dst, src1, src2;
1528 
1529     src1 = gen_load_fpr_F(dc, rs1);
1530     src2 = gen_load_fpr_F(dc, rs2);
1531     dst = gen_dest_fpr_F(dc);
1532 
1533     gen(dst, tcg_env, src1, src2);
1534     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1535 
1536     gen_store_fpr_F(dc, rd, dst);
1537 }
1538 
1539 #ifdef TARGET_SPARC64
1540 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1541                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1542 {
1543     TCGv_i32 dst, src1, src2;
1544 
1545     src1 = gen_load_fpr_F(dc, rs1);
1546     src2 = gen_load_fpr_F(dc, rs2);
1547     dst = gen_dest_fpr_F(dc);
1548 
1549     gen(dst, src1, src2);
1550 
1551     gen_store_fpr_F(dc, rd, dst);
1552 }
1553 #endif
1554 
1555 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1556                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1557 {
1558     TCGv_i64 dst, src;
1559 
1560     src = gen_load_fpr_D(dc, rs);
1561     dst = gen_dest_fpr_D(dc, rd);
1562 
1563     gen(dst, tcg_env, src);
1564     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1565 
1566     gen_store_fpr_D(dc, rd, dst);
1567 }
1568 
1569 #ifdef TARGET_SPARC64
1570 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1571                           void (*gen)(TCGv_i64, TCGv_i64))
1572 {
1573     TCGv_i64 dst, src;
1574 
1575     src = gen_load_fpr_D(dc, rs);
1576     dst = gen_dest_fpr_D(dc, rd);
1577 
1578     gen(dst, src);
1579 
1580     gen_store_fpr_D(dc, rd, dst);
1581 }
1582 #endif
1583 
1584 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1585                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1586 {
1587     TCGv_i64 dst, src1, src2;
1588 
1589     src1 = gen_load_fpr_D(dc, rs1);
1590     src2 = gen_load_fpr_D(dc, rs2);
1591     dst = gen_dest_fpr_D(dc, rd);
1592 
1593     gen(dst, tcg_env, src1, src2);
1594     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1595 
1596     gen_store_fpr_D(dc, rd, dst);
1597 }
1598 
1599 #ifdef TARGET_SPARC64
1600 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1601                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1602 {
1603     TCGv_i64 dst, src1, src2;
1604 
1605     src1 = gen_load_fpr_D(dc, rs1);
1606     src2 = gen_load_fpr_D(dc, rs2);
1607     dst = gen_dest_fpr_D(dc, rd);
1608 
1609     gen(dst, src1, src2);
1610 
1611     gen_store_fpr_D(dc, rd, dst);
1612 }
1613 
1614 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1615                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1616 {
1617     TCGv_i64 dst, src1, src2;
1618 
1619     src1 = gen_load_fpr_D(dc, rs1);
1620     src2 = gen_load_fpr_D(dc, rs2);
1621     dst = gen_dest_fpr_D(dc, rd);
1622 
1623     gen(dst, cpu_gsr, src1, src2);
1624 
1625     gen_store_fpr_D(dc, rd, dst);
1626 }
1627 
1628 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1629                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1630 {
1631     TCGv_i64 dst, src0, src1, src2;
1632 
1633     src1 = gen_load_fpr_D(dc, rs1);
1634     src2 = gen_load_fpr_D(dc, rs2);
1635     src0 = gen_load_fpr_D(dc, rd);
1636     dst = gen_dest_fpr_D(dc, rd);
1637 
1638     gen(dst, src0, src1, src2);
1639 
1640     gen_store_fpr_D(dc, rd, dst);
1641 }
1642 #endif
1643 
/* Quad FP unary op via the QT0/QT1 global temporaries: input staged in
   QT1, result read back from QT0; checks IEEE exception state. */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1655 
1656 #ifdef TARGET_SPARC64
/* Quad FP unary op via QT0/QT1; no IEEE exception check. */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1667 #endif
1668 
/* Quad FP binary op: operands staged in QT0/QT1, result read back from
   QT0; checks IEEE exception state. */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1681 
1682 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1683                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1684 {
1685     TCGv_i64 dst;
1686     TCGv_i32 src1, src2;
1687 
1688     src1 = gen_load_fpr_F(dc, rs1);
1689     src2 = gen_load_fpr_F(dc, rs2);
1690     dst = gen_dest_fpr_D(dc, rd);
1691 
1692     gen(dst, tcg_env, src1, src2);
1693     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1694 
1695     gen_store_fpr_D(dc, rd, dst);
1696 }
1697 
/* Binary op on two doubles producing a quad result in QT0; checks IEEE
   exception state. */
static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1712 
1713 #ifdef TARGET_SPARC64
1714 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1715                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1716 {
1717     TCGv_i64 dst;
1718     TCGv_i32 src;
1719 
1720     src = gen_load_fpr_F(dc, rs);
1721     dst = gen_dest_fpr_D(dc, rd);
1722 
1723     gen(dst, tcg_env, src);
1724     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1725 
1726     gen_store_fpr_D(dc, rd, dst);
1727 }
1728 #endif
1729 
1730 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1731                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1732 {
1733     TCGv_i64 dst;
1734     TCGv_i32 src;
1735 
1736     src = gen_load_fpr_F(dc, rs);
1737     dst = gen_dest_fpr_D(dc, rd);
1738 
1739     gen(dst, tcg_env, src);
1740 
1741     gen_store_fpr_D(dc, rd, dst);
1742 }
1743 
1744 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1745                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1746 {
1747     TCGv_i32 dst;
1748     TCGv_i64 src;
1749 
1750     src = gen_load_fpr_D(dc, rs);
1751     dst = gen_dest_fpr_F(dc);
1752 
1753     gen(dst, tcg_env, src);
1754     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1755 
1756     gen_store_fpr_F(dc, rd, dst);
1757 }
1758 
/* Quad-to-single conversion: input staged in QT1; checks IEEE
   exception state. */
static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1772 
/* Quad-to-double conversion: input staged in QT1; checks IEEE
   exception state. */
static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1786 
/* Single-to-quad conversion: result read back from QT0; no IEEE
   exception check. */
static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1799 
/* Double-to-quad conversion: result read back from QT0; no IEEE
   exception check. */
static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1812 
/* SWAP: atomically exchange SRC with memory at ADDR; the old memory
   value lands in DST. */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1819 
1820 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1821 {
1822     TCGv m1 = tcg_constant_tl(0xff);
1823     gen_address_mask(dc, addr);
1824     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1825 }
1826 
1827 /* asi moves */
1828 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI access, decoded once at translation time. */
typedef enum {
    GET_ASI_HELPER,  /* no fast path; defer to the ld/st_asi helpers */
    GET_ASI_EXCP,    /* decode already raised an exception; emit no access */
    GET_ASI_DIRECT,  /* ordinary access via tcg_gen_qemu_ld/st */
    GET_ASI_DTWINX,  /* twin/quad (TWINX, QUAD_LDD) ASIs */
    GET_ASI_BLOCK,   /* block-transfer ASIs (ASI_BLK_*) */
    GET_ASI_SHORT,   /* 8/16-bit FP load/store ASIs (ASI_FL8/FL16_*) */
    GET_ASI_BCOPY,   /* sparc32 block copy (ASI_M_BCOPY) */
    GET_ASI_BFILL,   /* sparc32 block fill (ASI_M_BFILL) */
} ASIType;

/* Result of get_asi(): everything needed to perform the access. */
typedef struct {
    ASIType type;
    int asi;      /* resolved ASI number (immediate or %asi on v9) */
    int mem_idx;  /* MMU index to use for the access */
    MemOp memop;  /* access size/endianness, possibly overridden */
} DisasASI;
1846 
/*
 * Decode the ASI of a load/store insn: resolve the ASI number (immediate
 * field, or the %asi register on v9 register-form), perform the privilege
 * check, and classify the access into an ASIType together with the MMU
 * index and MemOp to use.  If an exception was generated, the returned
 * type is GET_ASI_EXCP and the caller must emit no memory access.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: choose the MMU index from the address-space family. */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: classify the access type. */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2059 
/*
 * Emit an integer load through an alternate address space.  Direct ASIs
 * use an inline tcg load; everything else calls the ld_asi helper with
 * state saved first (the helper may raise an exception).
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; truncate to target width. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2094 
/*
 * Emit an integer store through an alternate address space.  Direct ASIs
 * use an inline tcg store; BCOPY is expanded inline on sparc32; anything
 * else calls the st_asi helper and then ends the TB, since the store may
 * have modified MMU state.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2170 
2171 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2172                          TCGv addr, int insn)
2173 {
2174     DisasASI da = get_asi(dc, insn, MO_TEUL);
2175 
2176     switch (da.type) {
2177     case GET_ASI_EXCP:
2178         break;
2179     case GET_ASI_DIRECT:
2180         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2181         break;
2182     default:
2183         /* ??? Should be DAE_invalid_asi.  */
2184         gen_exception(dc, TT_DATA_ACCESS);
2185         break;
2186     }
2187 }
2188 
2189 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2190                         int insn, int rd)
2191 {
2192     DisasASI da = get_asi(dc, insn, MO_TEUL);
2193     TCGv oldv;
2194 
2195     switch (da.type) {
2196     case GET_ASI_EXCP:
2197         return;
2198     case GET_ASI_DIRECT:
2199         oldv = tcg_temp_new();
2200         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2201                                   da.mem_idx, da.memop | MO_ALIGN);
2202         gen_store_gpr(dc, rd, oldv);
2203         break;
2204     default:
2205         /* ??? Should be DAE_invalid_asi.  */
2206         gen_exception(dc, TT_DATA_ACCESS);
2207         break;
2208     }
2209 }
2210 
/*
 * Generate code for LDSTUBA: atomically load the byte at ADDR (in the
 * ASI-selected address space) into DST and store 0xff back to ADDR.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; nothing to emit.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper sequence below is not atomic; force serial mode.  */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2246 #endif
2247 
2248 #ifdef TARGET_SPARC64
/*
 * Generate code for LDFA/LDDFA/LDQFA: load SIZE bytes (4, 8 or 16)
 * from ADDR in the ASI-selected address space into fp register RD.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; nothing to emit.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Buffer the first half in a temp so that a fault on the
               second load leaves cpu_fpr[rd / 2] unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the direct case, buffer the first half in a temp.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2357 
/*
 * Generate code for STFA/STDFA/STQFA: store fp register RD (SIZE bytes:
 * 4, 8 or 16) to ADDR in the ASI-selected address space.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; nothing to emit.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2440 
/*
 * Generate code for LDDA (sparc64): DTWINX loads two full 64-bit
 * registers; the legacy forms load one 64-bit doubleword and split it
 * across the even/odd gpr pair rd/rd+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; skip the writeback too.  */
        return;

    case GET_ASI_DTWINX:
        /* Twin load: two 64-bit values, 16-byte aligned as a pair.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2502 
/*
 * Generate code for STDA (sparc64): DTWINX stores two full 64-bit
 * registers; the legacy forms store the even/odd gpr pair rd/rd+1 as
 * one 64-bit doubleword.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; nothing to emit.  */
        break;

    case GET_ASI_DTWINX:
        /* Twin store: two 64-bit values, 16-byte aligned as a pair.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2558 
2559 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2560                          int insn, int rd)
2561 {
2562     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2563     TCGv oldv;
2564 
2565     switch (da.type) {
2566     case GET_ASI_EXCP:
2567         return;
2568     case GET_ASI_DIRECT:
2569         oldv = tcg_temp_new();
2570         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2571                                   da.mem_idx, da.memop | MO_ALIGN);
2572         gen_store_gpr(dc, rd, oldv);
2573         break;
2574     default:
2575         /* ??? Should be DAE_invalid_asi.  */
2576         gen_exception(dc, TT_DATA_ACCESS);
2577         break;
2578     }
2579 }
2580 
2581 #elif !defined(CONFIG_USER_ONLY)
/*
 * Generate code for LDDA (sparc32): load a 64-bit doubleword from ADDR
 * and split it across the even/odd gpr pair rd/rd+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; skip the writeback too.  */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Unhandled ASI: defer to the ld_asi helper at run time.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* High half to the even register, low half to the odd one.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2615 
/*
 * Generate code for STDA (sparc32): store the even/odd gpr pair
 * rd/rd+1 to ADDR as one 64-bit doubleword, with BFILL support.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Combine hi:lo into the single 64-bit value to store.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; nothing to emit.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Unhandled ASI: defer to the st_asi helper at run time.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2661 #endif
2662 
2663 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2664 {
2665     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2666     return gen_load_gpr(dc, rs1);
2667 }
2668 
2669 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2670 {
2671     if (IS_IMM) { /* immediate */
2672         target_long simm = GET_FIELDs(insn, 19, 31);
2673         TCGv t = tcg_temp_new();
2674         tcg_gen_movi_tl(t, simm);
2675         return t;
2676     } else {      /* register */
2677         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2678         return gen_load_gpr(dc, rs2);
2679     }
2680 }
2681 
2682 #ifdef TARGET_SPARC64
/*
 * FMOVScc/FMOVSr: conditionally move single-precision fp register RS
 * into RD according to the comparison described by CMP.
 */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* The comparison value is already 0/1; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_constant_i32(0);

    /* dst = (c32 != 0) ? s1 : s2 */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2708 
2709 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2710 {
2711     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2712     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2713                         gen_load_fpr_D(dc, rs),
2714                         gen_load_fpr_D(dc, rd));
2715     gen_store_fpr_D(dc, rd, dst);
2716 }
2717 
/*
 * FMOVQcc/FMOVQr: conditionally move quad-precision fp register RS
 * into RD, implemented as two 64-bit conditional moves.
 */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    /* We wrote cpu_fpr directly above; mark the destination dirty.  */
    gen_update_fprs_dirty(dc, qd);
}
2730 
2731 #ifndef CONFIG_USER_ONLY
/*
 * Set R_TSPTR to point at the trap_state entry for the current trap
 * level: &env->ts[env->tl & MAXTL_MASK].
 */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2753 #endif
2754 
2755 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2756                      int width, bool cc, bool left)
2757 {
2758     TCGv lo1, lo2;
2759     uint64_t amask, tabl, tabr;
2760     int shift, imask, omask;
2761 
2762     if (cc) {
2763         tcg_gen_mov_tl(cpu_cc_src, s1);
2764         tcg_gen_mov_tl(cpu_cc_src2, s2);
2765         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2766         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2767         dc->cc_op = CC_OP_SUB;
2768     }
2769 
2770     /* Theory of operation: there are two tables, left and right (not to
2771        be confused with the left and right versions of the opcode).  These
2772        are indexed by the low 3 bits of the inputs.  To make things "easy",
2773        these tables are loaded into two constants, TABL and TABR below.
2774        The operation index = (input & imask) << shift calculates the index
2775        into the constant, while val = (table >> index) & omask calculates
2776        the value we're looking for.  */
2777     switch (width) {
2778     case 8:
2779         imask = 0x7;
2780         shift = 3;
2781         omask = 0xff;
2782         if (left) {
2783             tabl = 0x80c0e0f0f8fcfeffULL;
2784             tabr = 0xff7f3f1f0f070301ULL;
2785         } else {
2786             tabl = 0x0103070f1f3f7fffULL;
2787             tabr = 0xfffefcf8f0e0c080ULL;
2788         }
2789         break;
2790     case 16:
2791         imask = 0x6;
2792         shift = 1;
2793         omask = 0xf;
2794         if (left) {
2795             tabl = 0x8cef;
2796             tabr = 0xf731;
2797         } else {
2798             tabl = 0x137f;
2799             tabr = 0xfec8;
2800         }
2801         break;
2802     case 32:
2803         imask = 0x4;
2804         shift = 0;
2805         omask = 0x3;
2806         if (left) {
2807             tabl = (2 << 2) | 3;
2808             tabr = (3 << 2) | 1;
2809         } else {
2810             tabl = (1 << 2) | 3;
2811             tabr = (3 << 2) | 2;
2812         }
2813         break;
2814     default:
2815         abort();
2816     }
2817 
2818     lo1 = tcg_temp_new();
2819     lo2 = tcg_temp_new();
2820     tcg_gen_andi_tl(lo1, s1, imask);
2821     tcg_gen_andi_tl(lo2, s2, imask);
2822     tcg_gen_shli_tl(lo1, lo1, shift);
2823     tcg_gen_shli_tl(lo2, lo2, shift);
2824 
2825     tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
2826     tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
2827     tcg_gen_andi_tl(lo1, lo1, omask);
2828     tcg_gen_andi_tl(lo2, lo2, omask);
2829 
2830     amask = -8;
2831     if (AM_CHECK(dc)) {
2832         amask &= 0xffffffffULL;
2833     }
2834     tcg_gen_andi_tl(s1, s1, amask);
2835     tcg_gen_andi_tl(s2, s2, amask);
2836 
2837     /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
2838     tcg_gen_and_tl(lo2, lo2, lo1);
2839     tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
2840 }
2841 
/*
 * VIS ALIGNADDRESS: dst = (s1 + s2) & ~7, and latch the low 3 bits of
 * the sum (negated when LEFT is set) into the low bits of %gsr for use
 * by a subsequent FALIGNDATA.
 */
static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
{
    TCGv tmp = tcg_temp_new();

    tcg_gen_add_tl(tmp, s1, s2);
    tcg_gen_andi_tl(dst, tmp, -8);
    if (left) {
        tcg_gen_neg_tl(tmp, tmp);
    }
    /* Deposit the (possibly negated) offset into GSR bits [2:0].  */
    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
}
2853 
/*
 * VIS FALIGNDATA: form DST from the concatenation S1:S2, shifted left
 * by the byte offset held in the low 3 bits of GSR.
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8, i.e. the offset in bits.  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2874 #endif
2875 
2876 /* Include the auto-generated decoder.  */
2877 #include "decode-insns.c.inc"
2878 
2879 #define TRANS(NAME, AVAIL, FUNC, ...) \
2880     static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
2881     { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }
2882 
2883 #define avail_ALL(C)      true
2884 #ifdef TARGET_SPARC64
2885 # define avail_32(C)      false
2886 # define avail_64(C)      true
2887 #else
2888 # define avail_32(C)      true
2889 # define avail_64(C)      false
2890 #endif
2891 
/* Default case for non jump instructions: step pc/npc forward by one
   insn, resolving a pending conditional-delay npc (JUMP_PC) into a
   real two-way branch.  */
static bool advance_pc(DisasContext *dc)
{
    if (dc->npc & 3) {
        /* npc is not a static address; its low bits encode its state.  */
        switch (dc->npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            dc->pc = dc->npc;
            gen_op_next_insn();
            break;
        case JUMP_PC:
            /* we can do a static jump */
            gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
            dc->base.is_jmp = DISAS_NORETURN;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
    return true;
}
2916 
2917 static bool advance_jump_uncond_never(DisasContext *dc, bool annul)
2918 {
2919     if (annul) {
2920         dc->pc = dc->npc + 4;
2921         dc->npc = dc->pc + 4;
2922     } else {
2923         dc->pc = dc->npc;
2924         dc->npc = dc->pc + 4;
2925     }
2926     return true;
2927 }
2928 
2929 static bool advance_jump_uncond_always(DisasContext *dc, bool annul,
2930                                        target_ulong dest)
2931 {
2932     if (annul) {
2933         dc->pc = dest;
2934         dc->npc = dest + 4;
2935     } else {
2936         dc->pc = dc->npc;
2937         dc->npc = dest;
2938         tcg_gen_mov_tl(cpu_pc, cpu_npc);
2939     }
2940     return true;
2941 }
2942 
2943 static bool advance_jump_cond(DisasContext *dc, bool annul, target_ulong dest)
2944 {
2945     if (annul) {
2946         gen_branch_a(dc, dest);
2947     } else {
2948         gen_branch_n(dc, dest);
2949     }
2950     return true;
2951 }
2952 
/*
 * Conditional branch on integer condition codes (Bicc / BPcc).
 * Condition 0 is "branch never" and 8 is "branch always"; all other
 * conditions are evaluated into cpu_cond and branched on.
 */
static bool do_bpcc(DisasContext *dc, arg_bcc *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);
    DisasCompare cmp;

    switch (a->cond) {
    case 0x0:
        return advance_jump_uncond_never(dc, a->a);
    case 0x8:
        return advance_jump_uncond_always(dc, a->a, target);
    default:
        flush_cond(dc);

        gen_compare(&cmp, a->cc, a->cond, dc);
        if (cmp.is_bool) {
            /* The comparison already produced 0/1; use it directly.  */
            tcg_gen_mov_tl(cpu_cond, cmp.c1);
        } else {
            tcg_gen_setcond_tl(cmp.cond, cpu_cond, cmp.c1, cmp.c2);
        }
        return advance_jump_cond(dc, a->a, target);
    }
}
2975 
2976 TRANS(Bicc, ALL, do_bpcc, a)
2977 TRANS(BPcc,  64, do_bpcc, a)
2978 
/*
 * Conditional branch on floating-point condition codes (FBfcc / FBPfcc).
 * Traps first if the FPU is disabled; otherwise mirrors do_bpcc with
 * condition 0 = never and 8 = always.
 */
static bool do_fbpfcc(DisasContext *dc, arg_bcc *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);
    DisasCompare cmp;

    if (gen_trap_ifnofpu(dc)) {
        return true;
    }
    switch (a->cond) {
    case 0x0:
        return advance_jump_uncond_never(dc, a->a);
    case 0x8:
        return advance_jump_uncond_always(dc, a->a, target);
    default:
        flush_cond(dc);

        gen_fcompare(&cmp, a->cc, a->cond);
        if (cmp.is_bool) {
            /* The comparison already produced 0/1; use it directly.  */
            tcg_gen_mov_tl(cpu_cond, cmp.c1);
        } else {
            tcg_gen_setcond_tl(cmp.cond, cpu_cond, cmp.c1, cmp.c2);
        }
        return advance_jump_cond(dc, a->a, target);
    }
}
3004 
3005 TRANS(FBPfcc,  64, do_fbpfcc, a)
3006 TRANS(FBfcc,  ALL, do_fbpfcc, a)
3007 
/* V9 BPr: branch on the contents of an integer register.  */
static bool trans_BPr(DisasContext *dc, arg_BPr *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);
    DisasCompare cmp;

    if (!avail_64(dc)) {
        return false;
    }
    if (gen_tcg_cond_reg[a->cond] == TCG_COND_NEVER) {
        /* Condition encodings mapping to NEVER are rejected as
           illegal instructions.  */
        return false;
    }

    flush_cond(dc);
    gen_compare_reg(&cmp, a->cond, gen_load_gpr(dc, a->rs1));
    tcg_gen_setcond_tl(cmp.cond, cpu_cond, cmp.c1, cmp.c2);
    return advance_jump_cond(dc, a->a, target);
}
3025 
/* CALL: save the CALL insn's own address into r15 (%o7) and
   delay-branch to the pc-relative target.  */
static bool trans_CALL(DisasContext *dc, arg_CALL *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);

    gen_store_gpr(dc, 15, tcg_constant_tl(dc->pc));
    gen_mov_pc_npc(dc);
    dc->npc = target;
    return true;
}
3035 
/* Coprocessor instructions, which no emulated CPU model implements.  */
static bool trans_NCP(DisasContext *dc, arg_NCP *a)
{
    /*
     * For sparc32, always generate the no-coprocessor exception.
     * For sparc64, always generate illegal instruction.
     */
#ifdef TARGET_SPARC64
    return false;
#else
    gen_exception(dc, TT_NCP_INSN);
    return true;
#endif
}
3049 
/*
 * Feature gates for the legacy decoder below: if the CPU model lacks
 * CPU_FEATURE_<FEATURE>, jump to the illegal-insn / no-FPU exception
 * paths.  These expand to an unbraced if + goto, so they are only
 * usable inside a function that defines those labels.
 */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3056 
3057 /* before an instruction, dc->pc must be static */
3058 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3059 {
3060     unsigned int opc, rs1, rs2, rd;
3061     TCGv cpu_src1, cpu_src2;
3062     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3063     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3064     target_long simm;
3065 
3066     opc = GET_FIELD(insn, 0, 1);
3067     rd = GET_FIELD(insn, 2, 6);
3068 
3069     switch (opc) {
3070     case 0:                     /* branches/sethi */
3071         {
3072             unsigned int xop = GET_FIELD(insn, 7, 9);
3073             switch (xop) {
3074 #ifdef TARGET_SPARC64
3075             case 0x1:           /* V9 BPcc */
3076                 g_assert_not_reached(); /* in decodetree */
3077             case 0x3:           /* V9 BPr */
3078                 g_assert_not_reached(); /* in decodetree */
3079             case 0x5:           /* V9 FBPcc */
3080                 g_assert_not_reached(); /* in decodetree */
3081 #else
3082             case 0x7:           /* CBN+x */
3083                 g_assert_not_reached(); /* in decodetree */
3084 #endif
3085             case 0x2:           /* BN+x */
3086                 g_assert_not_reached(); /* in decodetree */
3087             case 0x6:           /* FBN+x */
3088                 g_assert_not_reached(); /* in decodetree */
3089             case 0x4:           /* SETHI */
3090                 /* Special-case %g0 because that's the canonical nop.  */
3091                 if (rd) {
3092                     uint32_t value = GET_FIELD(insn, 10, 31);
3093                     TCGv t = gen_dest_gpr(dc, rd);
3094                     tcg_gen_movi_tl(t, value << 10);
3095                     gen_store_gpr(dc, rd, t);
3096                 }
3097                 break;
3098             case 0x0:           /* UNIMPL */
3099             default:
3100                 goto illegal_insn;
3101             }
3102             break;
3103         }
3104         break;
3105     case 1:
3106         g_assert_not_reached(); /* in decodetree */
3107     case 2:                     /* FPU & Logical Operations */
3108         {
3109             unsigned int xop = GET_FIELD(insn, 7, 12);
3110             TCGv cpu_dst = tcg_temp_new();
3111             TCGv cpu_tmp0;
3112 
3113             if (xop == 0x3a) {  /* generate trap */
3114                 int cond = GET_FIELD(insn, 3, 6);
3115                 TCGv_i32 trap;
3116                 TCGLabel *l1 = NULL;
3117                 int mask;
3118 
3119                 if (cond == 0) {
3120                     /* Trap never.  */
3121                     break;
3122                 }
3123 
3124                 save_state(dc);
3125 
3126                 if (cond != 8) {
3127                     /* Conditional trap.  */
3128                     DisasCompare cmp;
3129 #ifdef TARGET_SPARC64
3130                     /* V9 icc/xcc */
3131                     int cc = GET_FIELD_SP(insn, 11, 12);
3132                     if (cc == 0) {
3133                         gen_compare(&cmp, 0, cond, dc);
3134                     } else if (cc == 2) {
3135                         gen_compare(&cmp, 1, cond, dc);
3136                     } else {
3137                         goto illegal_insn;
3138                     }
3139 #else
3140                     gen_compare(&cmp, 0, cond, dc);
3141 #endif
3142                     l1 = gen_new_label();
3143                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3144                                       cmp.c1, cmp.c2, l1);
3145                 }
3146 
3147                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3148                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3149 
3150                 /* Don't use the normal temporaries, as they may well have
3151                    gone out of scope with the branch above.  While we're
3152                    doing that we might as well pre-truncate to 32-bit.  */
3153                 trap = tcg_temp_new_i32();
3154 
3155                 rs1 = GET_FIELD_SP(insn, 14, 18);
3156                 if (IS_IMM) {
3157                     rs2 = GET_FIELD_SP(insn, 0, 7);
3158                     if (rs1 == 0) {
3159                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3160                         /* Signal that the trap value is fully constant.  */
3161                         mask = 0;
3162                     } else {
3163                         TCGv t1 = gen_load_gpr(dc, rs1);
3164                         tcg_gen_trunc_tl_i32(trap, t1);
3165                         tcg_gen_addi_i32(trap, trap, rs2);
3166                     }
3167                 } else {
3168                     TCGv t1, t2;
3169                     rs2 = GET_FIELD_SP(insn, 0, 4);
3170                     t1 = gen_load_gpr(dc, rs1);
3171                     t2 = gen_load_gpr(dc, rs2);
3172                     tcg_gen_add_tl(t1, t1, t2);
3173                     tcg_gen_trunc_tl_i32(trap, t1);
3174                 }
3175                 if (mask != 0) {
3176                     tcg_gen_andi_i32(trap, trap, mask);
3177                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3178                 }
3179 
3180                 gen_helper_raise_exception(tcg_env, trap);
3181 
3182                 if (cond == 8) {
3183                     /* An unconditional trap ends the TB.  */
3184                     dc->base.is_jmp = DISAS_NORETURN;
3185                     goto jmp_insn;
3186                 } else {
3187                     /* A conditional trap falls through to the next insn.  */
3188                     gen_set_label(l1);
3189                     break;
3190                 }
3191             } else if (xop == 0x28) {
3192                 rs1 = GET_FIELD(insn, 13, 17);
3193                 switch(rs1) {
3194                 case 0: /* rdy */
3195 #ifndef TARGET_SPARC64
3196                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3197                                        manual, rdy on the microSPARC
3198                                        II */
3199                 case 0x0f:          /* stbar in the SPARCv8 manual,
3200                                        rdy on the microSPARC II */
3201                 case 0x10 ... 0x1f: /* implementation-dependent in the
3202                                        SPARCv8 manual, rdy on the
3203                                        microSPARC II */
3204                     /* Read Asr17 */
3205                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3206                         TCGv t = gen_dest_gpr(dc, rd);
3207                         /* Read Asr17 for a Leon3 monoprocessor */
3208                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3209                         gen_store_gpr(dc, rd, t);
3210                         break;
3211                     }
3212 #endif
3213                     gen_store_gpr(dc, rd, cpu_y);
3214                     break;
3215 #ifdef TARGET_SPARC64
3216                 case 0x2: /* V9 rdccr */
3217                     update_psr(dc);
3218                     gen_helper_rdccr(cpu_dst, tcg_env);
3219                     gen_store_gpr(dc, rd, cpu_dst);
3220                     break;
3221                 case 0x3: /* V9 rdasi */
3222                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3223                     gen_store_gpr(dc, rd, cpu_dst);
3224                     break;
3225                 case 0x4: /* V9 rdtick */
3226                     {
3227                         TCGv_ptr r_tickptr;
3228                         TCGv_i32 r_const;
3229 
3230                         r_tickptr = tcg_temp_new_ptr();
3231                         r_const = tcg_constant_i32(dc->mem_idx);
3232                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3233                                        offsetof(CPUSPARCState, tick));
3234                         if (translator_io_start(&dc->base)) {
3235                             dc->base.is_jmp = DISAS_EXIT;
3236                         }
3237                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3238                                                   r_const);
3239                         gen_store_gpr(dc, rd, cpu_dst);
3240                     }
3241                     break;
3242                 case 0x5: /* V9 rdpc */
3243                     {
3244                         TCGv t = gen_dest_gpr(dc, rd);
3245                         if (unlikely(AM_CHECK(dc))) {
3246                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3247                         } else {
3248                             tcg_gen_movi_tl(t, dc->pc);
3249                         }
3250                         gen_store_gpr(dc, rd, t);
3251                     }
3252                     break;
3253                 case 0x6: /* V9 rdfprs */
3254                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3255                     gen_store_gpr(dc, rd, cpu_dst);
3256                     break;
3257                 case 0xf: /* V9 membar */
3258                     break; /* no effect */
3259                 case 0x13: /* Graphics Status */
3260                     if (gen_trap_ifnofpu(dc)) {
3261                         goto jmp_insn;
3262                     }
3263                     gen_store_gpr(dc, rd, cpu_gsr);
3264                     break;
3265                 case 0x16: /* Softint */
3266                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3267                                      offsetof(CPUSPARCState, softint));
3268                     gen_store_gpr(dc, rd, cpu_dst);
3269                     break;
3270                 case 0x17: /* Tick compare */
3271                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3272                     break;
3273                 case 0x18: /* System tick */
3274                     {
3275                         TCGv_ptr r_tickptr;
3276                         TCGv_i32 r_const;
3277 
3278                         r_tickptr = tcg_temp_new_ptr();
3279                         r_const = tcg_constant_i32(dc->mem_idx);
3280                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3281                                        offsetof(CPUSPARCState, stick));
3282                         if (translator_io_start(&dc->base)) {
3283                             dc->base.is_jmp = DISAS_EXIT;
3284                         }
3285                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3286                                                   r_const);
3287                         gen_store_gpr(dc, rd, cpu_dst);
3288                     }
3289                     break;
3290                 case 0x19: /* System tick compare */
3291                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3292                     break;
3293                 case 0x1a: /* UltraSPARC-T1 Strand status */
3294                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3295                      * this ASR as impl. dep
3296                      */
3297                     CHECK_IU_FEATURE(dc, HYPV);
3298                     {
3299                         TCGv t = gen_dest_gpr(dc, rd);
3300                         tcg_gen_movi_tl(t, 1UL);
3301                         gen_store_gpr(dc, rd, t);
3302                     }
3303                     break;
3304                 case 0x10: /* Performance Control */
3305                 case 0x11: /* Performance Instrumentation Counter */
3306                 case 0x12: /* Dispatch Control */
3307                 case 0x14: /* Softint set, WO */
3308                 case 0x15: /* Softint clear, WO */
3309 #endif
3310                 default:
3311                     goto illegal_insn;
3312                 }
3313 #if !defined(CONFIG_USER_ONLY)
3314             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3315 #ifndef TARGET_SPARC64
3316                 if (!supervisor(dc)) {
3317                     goto priv_insn;
3318                 }
3319                 update_psr(dc);
3320                 gen_helper_rdpsr(cpu_dst, tcg_env);
3321 #else
3322                 CHECK_IU_FEATURE(dc, HYPV);
3323                 if (!hypervisor(dc))
3324                     goto priv_insn;
3325                 rs1 = GET_FIELD(insn, 13, 17);
3326                 switch (rs1) {
3327                 case 0: // hpstate
3328                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3329                                    offsetof(CPUSPARCState, hpstate));
3330                     break;
3331                 case 1: // htstate
3332                     // gen_op_rdhtstate();
3333                     break;
3334                 case 3: // hintp
3335                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3336                     break;
3337                 case 5: // htba
3338                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3339                     break;
3340                 case 6: // hver
3341                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3342                     break;
3343                 case 31: // hstick_cmpr
3344                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3345                     break;
3346                 default:
3347                     goto illegal_insn;
3348                 }
3349 #endif
3350                 gen_store_gpr(dc, rd, cpu_dst);
3351                 break;
3352             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3353                 if (!supervisor(dc)) {
3354                     goto priv_insn;
3355                 }
3356                 cpu_tmp0 = tcg_temp_new();
3357 #ifdef TARGET_SPARC64
3358                 rs1 = GET_FIELD(insn, 13, 17);
3359                 switch (rs1) {
3360                 case 0: // tpc
3361                     {
3362                         TCGv_ptr r_tsptr;
3363 
3364                         r_tsptr = tcg_temp_new_ptr();
3365                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3366                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3367                                       offsetof(trap_state, tpc));
3368                     }
3369                     break;
3370                 case 1: // tnpc
3371                     {
3372                         TCGv_ptr r_tsptr;
3373 
3374                         r_tsptr = tcg_temp_new_ptr();
3375                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3376                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3377                                       offsetof(trap_state, tnpc));
3378                     }
3379                     break;
3380                 case 2: // tstate
3381                     {
3382                         TCGv_ptr r_tsptr;
3383 
3384                         r_tsptr = tcg_temp_new_ptr();
3385                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3386                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3387                                       offsetof(trap_state, tstate));
3388                     }
3389                     break;
3390                 case 3: // tt
3391                     {
3392                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3393 
3394                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3395                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3396                                          offsetof(trap_state, tt));
3397                     }
3398                     break;
3399                 case 4: // tick
3400                     {
3401                         TCGv_ptr r_tickptr;
3402                         TCGv_i32 r_const;
3403 
3404                         r_tickptr = tcg_temp_new_ptr();
3405                         r_const = tcg_constant_i32(dc->mem_idx);
3406                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3407                                        offsetof(CPUSPARCState, tick));
3408                         if (translator_io_start(&dc->base)) {
3409                             dc->base.is_jmp = DISAS_EXIT;
3410                         }
3411                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3412                                                   r_tickptr, r_const);
3413                     }
3414                     break;
3415                 case 5: // tba
3416                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3417                     break;
3418                 case 6: // pstate
3419                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3420                                      offsetof(CPUSPARCState, pstate));
3421                     break;
3422                 case 7: // tl
3423                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3424                                      offsetof(CPUSPARCState, tl));
3425                     break;
3426                 case 8: // pil
3427                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3428                                      offsetof(CPUSPARCState, psrpil));
3429                     break;
3430                 case 9: // cwp
3431                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3432                     break;
3433                 case 10: // cansave
3434                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3435                                      offsetof(CPUSPARCState, cansave));
3436                     break;
3437                 case 11: // canrestore
3438                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3439                                      offsetof(CPUSPARCState, canrestore));
3440                     break;
3441                 case 12: // cleanwin
3442                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3443                                      offsetof(CPUSPARCState, cleanwin));
3444                     break;
3445                 case 13: // otherwin
3446                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3447                                      offsetof(CPUSPARCState, otherwin));
3448                     break;
3449                 case 14: // wstate
3450                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3451                                      offsetof(CPUSPARCState, wstate));
3452                     break;
3453                 case 16: // UA2005 gl
3454                     CHECK_IU_FEATURE(dc, GL);
3455                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3456                                      offsetof(CPUSPARCState, gl));
3457                     break;
3458                 case 26: // UA2005 strand status
3459                     CHECK_IU_FEATURE(dc, HYPV);
3460                     if (!hypervisor(dc))
3461                         goto priv_insn;
3462                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3463                     break;
3464                 case 31: // ver
3465                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3466                     break;
3467                 case 15: // fq
3468                 default:
3469                     goto illegal_insn;
3470                 }
3471 #else
3472                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3473 #endif
3474                 gen_store_gpr(dc, rd, cpu_tmp0);
3475                 break;
3476 #endif
3477 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3478             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3479 #ifdef TARGET_SPARC64
3480                 gen_helper_flushw(tcg_env);
3481 #else
3482                 if (!supervisor(dc))
3483                     goto priv_insn;
3484                 gen_store_gpr(dc, rd, cpu_tbr);
3485 #endif
3486                 break;
3487 #endif
3488             } else if (xop == 0x34) {   /* FPU Operations */
3489                 if (gen_trap_ifnofpu(dc)) {
3490                     goto jmp_insn;
3491                 }
3492                 gen_op_clear_ieee_excp_and_FTT();
3493                 rs1 = GET_FIELD(insn, 13, 17);
3494                 rs2 = GET_FIELD(insn, 27, 31);
3495                 xop = GET_FIELD(insn, 18, 26);
3496 
3497                 switch (xop) {
3498                 case 0x1: /* fmovs */
3499                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3500                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3501                     break;
3502                 case 0x5: /* fnegs */
3503                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3504                     break;
3505                 case 0x9: /* fabss */
3506                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3507                     break;
3508                 case 0x29: /* fsqrts */
3509                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3510                     break;
3511                 case 0x2a: /* fsqrtd */
3512                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3513                     break;
3514                 case 0x2b: /* fsqrtq */
3515                     CHECK_FPU_FEATURE(dc, FLOAT128);
3516                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3517                     break;
3518                 case 0x41: /* fadds */
3519                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3520                     break;
3521                 case 0x42: /* faddd */
3522                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3523                     break;
3524                 case 0x43: /* faddq */
3525                     CHECK_FPU_FEATURE(dc, FLOAT128);
3526                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3527                     break;
3528                 case 0x45: /* fsubs */
3529                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3530                     break;
3531                 case 0x46: /* fsubd */
3532                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3533                     break;
3534                 case 0x47: /* fsubq */
3535                     CHECK_FPU_FEATURE(dc, FLOAT128);
3536                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3537                     break;
3538                 case 0x49: /* fmuls */
3539                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3540                     break;
3541                 case 0x4a: /* fmuld */
3542                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3543                     break;
3544                 case 0x4b: /* fmulq */
3545                     CHECK_FPU_FEATURE(dc, FLOAT128);
3546                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3547                     break;
3548                 case 0x4d: /* fdivs */
3549                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3550                     break;
3551                 case 0x4e: /* fdivd */
3552                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3553                     break;
3554                 case 0x4f: /* fdivq */
3555                     CHECK_FPU_FEATURE(dc, FLOAT128);
3556                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3557                     break;
3558                 case 0x69: /* fsmuld */
3559                     CHECK_FPU_FEATURE(dc, FSMULD);
3560                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3561                     break;
3562                 case 0x6e: /* fdmulq */
3563                     CHECK_FPU_FEATURE(dc, FLOAT128);
3564                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3565                     break;
3566                 case 0xc4: /* fitos */
3567                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3568                     break;
3569                 case 0xc6: /* fdtos */
3570                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3571                     break;
3572                 case 0xc7: /* fqtos */
3573                     CHECK_FPU_FEATURE(dc, FLOAT128);
3574                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3575                     break;
3576                 case 0xc8: /* fitod */
3577                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3578                     break;
3579                 case 0xc9: /* fstod */
3580                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3581                     break;
3582                 case 0xcb: /* fqtod */
3583                     CHECK_FPU_FEATURE(dc, FLOAT128);
3584                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3585                     break;
3586                 case 0xcc: /* fitoq */
3587                     CHECK_FPU_FEATURE(dc, FLOAT128);
3588                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3589                     break;
3590                 case 0xcd: /* fstoq */
3591                     CHECK_FPU_FEATURE(dc, FLOAT128);
3592                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3593                     break;
3594                 case 0xce: /* fdtoq */
3595                     CHECK_FPU_FEATURE(dc, FLOAT128);
3596                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3597                     break;
3598                 case 0xd1: /* fstoi */
3599                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3600                     break;
3601                 case 0xd2: /* fdtoi */
3602                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3603                     break;
3604                 case 0xd3: /* fqtoi */
3605                     CHECK_FPU_FEATURE(dc, FLOAT128);
3606                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3607                     break;
3608 #ifdef TARGET_SPARC64
3609                 case 0x2: /* V9 fmovd */
3610                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3611                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3612                     break;
3613                 case 0x3: /* V9 fmovq */
3614                     CHECK_FPU_FEATURE(dc, FLOAT128);
3615                     gen_move_Q(dc, rd, rs2);
3616                     break;
3617                 case 0x6: /* V9 fnegd */
3618                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3619                     break;
3620                 case 0x7: /* V9 fnegq */
3621                     CHECK_FPU_FEATURE(dc, FLOAT128);
3622                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3623                     break;
3624                 case 0xa: /* V9 fabsd */
3625                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3626                     break;
3627                 case 0xb: /* V9 fabsq */
3628                     CHECK_FPU_FEATURE(dc, FLOAT128);
3629                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3630                     break;
3631                 case 0x81: /* V9 fstox */
3632                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3633                     break;
3634                 case 0x82: /* V9 fdtox */
3635                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3636                     break;
3637                 case 0x83: /* V9 fqtox */
3638                     CHECK_FPU_FEATURE(dc, FLOAT128);
3639                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3640                     break;
3641                 case 0x84: /* V9 fxtos */
3642                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3643                     break;
3644                 case 0x88: /* V9 fxtod */
3645                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3646                     break;
3647                 case 0x8c: /* V9 fxtoq */
3648                     CHECK_FPU_FEATURE(dc, FLOAT128);
3649                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3650                     break;
3651 #endif
3652                 default:
3653                     goto illegal_insn;
3654                 }
3655             } else if (xop == 0x35) {   /* FPU Operations */
3656 #ifdef TARGET_SPARC64
3657                 int cond;
3658 #endif
3659                 if (gen_trap_ifnofpu(dc)) {
3660                     goto jmp_insn;
3661                 }
3662                 gen_op_clear_ieee_excp_and_FTT();
3663                 rs1 = GET_FIELD(insn, 13, 17);
3664                 rs2 = GET_FIELD(insn, 27, 31);
3665                 xop = GET_FIELD(insn, 18, 26);
3666 
3667 #ifdef TARGET_SPARC64
3668 #define FMOVR(sz)                                                  \
3669                 do {                                               \
3670                     DisasCompare cmp;                              \
3671                     cond = GET_FIELD_SP(insn, 10, 12);             \
3672                     cpu_src1 = get_src1(dc, insn);                 \
3673                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3674                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3675                 } while (0)
3676 
3677                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3678                     FMOVR(s);
3679                     break;
3680                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3681                     FMOVR(d);
3682                     break;
3683                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3684                     CHECK_FPU_FEATURE(dc, FLOAT128);
3685                     FMOVR(q);
3686                     break;
3687                 }
3688 #undef FMOVR
3689 #endif
3690                 switch (xop) {
3691 #ifdef TARGET_SPARC64
3692 #define FMOVCC(fcc, sz)                                                 \
3693                     do {                                                \
3694                         DisasCompare cmp;                               \
3695                         cond = GET_FIELD_SP(insn, 14, 17);              \
3696                         gen_fcompare(&cmp, fcc, cond);                  \
3697                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3698                     } while (0)
3699 
3700                     case 0x001: /* V9 fmovscc %fcc0 */
3701                         FMOVCC(0, s);
3702                         break;
3703                     case 0x002: /* V9 fmovdcc %fcc0 */
3704                         FMOVCC(0, d);
3705                         break;
3706                     case 0x003: /* V9 fmovqcc %fcc0 */
3707                         CHECK_FPU_FEATURE(dc, FLOAT128);
3708                         FMOVCC(0, q);
3709                         break;
3710                     case 0x041: /* V9 fmovscc %fcc1 */
3711                         FMOVCC(1, s);
3712                         break;
3713                     case 0x042: /* V9 fmovdcc %fcc1 */
3714                         FMOVCC(1, d);
3715                         break;
3716                     case 0x043: /* V9 fmovqcc %fcc1 */
3717                         CHECK_FPU_FEATURE(dc, FLOAT128);
3718                         FMOVCC(1, q);
3719                         break;
3720                     case 0x081: /* V9 fmovscc %fcc2 */
3721                         FMOVCC(2, s);
3722                         break;
3723                     case 0x082: /* V9 fmovdcc %fcc2 */
3724                         FMOVCC(2, d);
3725                         break;
3726                     case 0x083: /* V9 fmovqcc %fcc2 */
3727                         CHECK_FPU_FEATURE(dc, FLOAT128);
3728                         FMOVCC(2, q);
3729                         break;
3730                     case 0x0c1: /* V9 fmovscc %fcc3 */
3731                         FMOVCC(3, s);
3732                         break;
3733                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3734                         FMOVCC(3, d);
3735                         break;
3736                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3737                         CHECK_FPU_FEATURE(dc, FLOAT128);
3738                         FMOVCC(3, q);
3739                         break;
3740 #undef FMOVCC
/*
 * FMOVCC(xcc, sz): emit a V9 conditional FP register move keyed on the
 * integer condition codes.  xcc selects the CC set passed to
 * gen_compare() -- 0 for %icc, 1 for %xcc (see the case labels below).
 * sz is the FP operand-size suffix (s, d, or q), token-pasted to pick
 * gen_fmovs/gen_fmovd/gen_fmovq.  Expands inside the decode loop and
 * uses its locals: reads insn/dc/rd/rs2 and writes cond with the
 * condition field extracted from bits 14..17 of the instruction.
 */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)
3748 
3749                     case 0x101: /* V9 fmovscc %icc */
3750                         FMOVCC(0, s);
3751                         break;
3752                     case 0x102: /* V9 fmovdcc %icc */
3753                         FMOVCC(0, d);
3754                         break;
3755                     case 0x103: /* V9 fmovqcc %icc */
3756                         CHECK_FPU_FEATURE(dc, FLOAT128);
3757                         FMOVCC(0, q);
3758                         break;
3759                     case 0x181: /* V9 fmovscc %xcc */
3760                         FMOVCC(1, s);
3761                         break;
3762                     case 0x182: /* V9 fmovdcc %xcc */
3763                         FMOVCC(1, d);
3764                         break;
3765                     case 0x183: /* V9 fmovqcc %xcc */
3766                         CHECK_FPU_FEATURE(dc, FLOAT128);
3767                         FMOVCC(1, q);
3768                         break;
3769 #undef FMOVCC
3770 #endif
3771                     case 0x51: /* fcmps, V9 %fcc */
3772                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3773                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3774                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3775                         break;
3776                     case 0x52: /* fcmpd, V9 %fcc */
3777                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3778                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3779                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3780                         break;
3781                     case 0x53: /* fcmpq, V9 %fcc */
3782                         CHECK_FPU_FEATURE(dc, FLOAT128);
3783                         gen_op_load_fpr_QT0(QFPREG(rs1));
3784                         gen_op_load_fpr_QT1(QFPREG(rs2));
3785                         gen_op_fcmpq(rd & 3);
3786                         break;
3787                     case 0x55: /* fcmpes, V9 %fcc */
3788                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3789                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3790                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3791                         break;
3792                     case 0x56: /* fcmped, V9 %fcc */
3793                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3794                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3795                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3796                         break;
3797                     case 0x57: /* fcmpeq, V9 %fcc */
3798                         CHECK_FPU_FEATURE(dc, FLOAT128);
3799                         gen_op_load_fpr_QT0(QFPREG(rs1));
3800                         gen_op_load_fpr_QT1(QFPREG(rs2));
3801                         gen_op_fcmpeq(rd & 3);
3802                         break;
3803                     default:
3804                         goto illegal_insn;
3805                 }
3806             } else if (xop == 0x2) {
3807                 TCGv dst = gen_dest_gpr(dc, rd);
3808                 rs1 = GET_FIELD(insn, 13, 17);
3809                 if (rs1 == 0) {
3810                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3811                     if (IS_IMM) {       /* immediate */
3812                         simm = GET_FIELDs(insn, 19, 31);
3813                         tcg_gen_movi_tl(dst, simm);
3814                         gen_store_gpr(dc, rd, dst);
3815                     } else {            /* register */
3816                         rs2 = GET_FIELD(insn, 27, 31);
3817                         if (rs2 == 0) {
3818                             tcg_gen_movi_tl(dst, 0);
3819                             gen_store_gpr(dc, rd, dst);
3820                         } else {
3821                             cpu_src2 = gen_load_gpr(dc, rs2);
3822                             gen_store_gpr(dc, rd, cpu_src2);
3823                         }
3824                     }
3825                 } else {
3826                     cpu_src1 = get_src1(dc, insn);
3827                     if (IS_IMM) {       /* immediate */
3828                         simm = GET_FIELDs(insn, 19, 31);
3829                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3830                         gen_store_gpr(dc, rd, dst);
3831                     } else {            /* register */
3832                         rs2 = GET_FIELD(insn, 27, 31);
3833                         if (rs2 == 0) {
3834                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3835                             gen_store_gpr(dc, rd, cpu_src1);
3836                         } else {
3837                             cpu_src2 = gen_load_gpr(dc, rs2);
3838                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3839                             gen_store_gpr(dc, rd, dst);
3840                         }
3841                     }
3842                 }
3843 #ifdef TARGET_SPARC64
3844             } else if (xop == 0x25) { /* sll, V9 sllx */
3845                 cpu_src1 = get_src1(dc, insn);
3846                 if (IS_IMM) {   /* immediate */
3847                     simm = GET_FIELDs(insn, 20, 31);
3848                     if (insn & (1 << 12)) {
3849                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3850                     } else {
3851                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3852                     }
3853                 } else {                /* register */
3854                     rs2 = GET_FIELD(insn, 27, 31);
3855                     cpu_src2 = gen_load_gpr(dc, rs2);
3856                     cpu_tmp0 = tcg_temp_new();
3857                     if (insn & (1 << 12)) {
3858                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3859                     } else {
3860                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3861                     }
3862                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3863                 }
3864                 gen_store_gpr(dc, rd, cpu_dst);
3865             } else if (xop == 0x26) { /* srl, V9 srlx */
3866                 cpu_src1 = get_src1(dc, insn);
3867                 if (IS_IMM) {   /* immediate */
3868                     simm = GET_FIELDs(insn, 20, 31);
3869                     if (insn & (1 << 12)) {
3870                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3871                     } else {
3872                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3873                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3874                     }
3875                 } else {                /* register */
3876                     rs2 = GET_FIELD(insn, 27, 31);
3877                     cpu_src2 = gen_load_gpr(dc, rs2);
3878                     cpu_tmp0 = tcg_temp_new();
3879                     if (insn & (1 << 12)) {
3880                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3881                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3882                     } else {
3883                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3884                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3885                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3886                     }
3887                 }
3888                 gen_store_gpr(dc, rd, cpu_dst);
3889             } else if (xop == 0x27) { /* sra, V9 srax */
3890                 cpu_src1 = get_src1(dc, insn);
3891                 if (IS_IMM) {   /* immediate */
3892                     simm = GET_FIELDs(insn, 20, 31);
3893                     if (insn & (1 << 12)) {
3894                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3895                     } else {
3896                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3897                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3898                     }
3899                 } else {                /* register */
3900                     rs2 = GET_FIELD(insn, 27, 31);
3901                     cpu_src2 = gen_load_gpr(dc, rs2);
3902                     cpu_tmp0 = tcg_temp_new();
3903                     if (insn & (1 << 12)) {
3904                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3905                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3906                     } else {
3907                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3908                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3909                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3910                     }
3911                 }
3912                 gen_store_gpr(dc, rd, cpu_dst);
3913 #endif
3914             } else if (xop < 0x36) {
3915                 if (xop < 0x20) {
3916                     cpu_src1 = get_src1(dc, insn);
3917                     cpu_src2 = get_src2(dc, insn);
3918                     switch (xop & ~0x10) {
3919                     case 0x0: /* add */
3920                         if (xop & 0x10) {
3921                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3922                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3923                             dc->cc_op = CC_OP_ADD;
3924                         } else {
3925                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3926                         }
3927                         break;
3928                     case 0x1: /* and */
3929                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3930                         if (xop & 0x10) {
3931                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3932                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3933                             dc->cc_op = CC_OP_LOGIC;
3934                         }
3935                         break;
3936                     case 0x2: /* or */
3937                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3938                         if (xop & 0x10) {
3939                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3940                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3941                             dc->cc_op = CC_OP_LOGIC;
3942                         }
3943                         break;
3944                     case 0x3: /* xor */
3945                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3946                         if (xop & 0x10) {
3947                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3948                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3949                             dc->cc_op = CC_OP_LOGIC;
3950                         }
3951                         break;
3952                     case 0x4: /* sub */
3953                         if (xop & 0x10) {
3954                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3955                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3956                             dc->cc_op = CC_OP_SUB;
3957                         } else {
3958                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3959                         }
3960                         break;
3961                     case 0x5: /* andn */
3962                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3963                         if (xop & 0x10) {
3964                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3965                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3966                             dc->cc_op = CC_OP_LOGIC;
3967                         }
3968                         break;
3969                     case 0x6: /* orn */
3970                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3971                         if (xop & 0x10) {
3972                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3973                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3974                             dc->cc_op = CC_OP_LOGIC;
3975                         }
3976                         break;
3977                     case 0x7: /* xorn */
3978                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3979                         if (xop & 0x10) {
3980                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3981                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3982                             dc->cc_op = CC_OP_LOGIC;
3983                         }
3984                         break;
3985                     case 0x8: /* addx, V9 addc */
3986                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3987                                         (xop & 0x10));
3988                         break;
3989 #ifdef TARGET_SPARC64
3990                     case 0x9: /* V9 mulx */
3991                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3992                         break;
3993 #endif
3994                     case 0xa: /* umul */
3995                         CHECK_IU_FEATURE(dc, MUL);
3996                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3997                         if (xop & 0x10) {
3998                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3999                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4000                             dc->cc_op = CC_OP_LOGIC;
4001                         }
4002                         break;
4003                     case 0xb: /* smul */
4004                         CHECK_IU_FEATURE(dc, MUL);
4005                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4006                         if (xop & 0x10) {
4007                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4008                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4009                             dc->cc_op = CC_OP_LOGIC;
4010                         }
4011                         break;
4012                     case 0xc: /* subx, V9 subc */
4013                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4014                                         (xop & 0x10));
4015                         break;
4016 #ifdef TARGET_SPARC64
4017                     case 0xd: /* V9 udivx */
4018                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
4019                         break;
4020 #endif
4021                     case 0xe: /* udiv */
4022                         CHECK_IU_FEATURE(dc, DIV);
4023                         if (xop & 0x10) {
4024                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
4025                                                cpu_src2);
4026                             dc->cc_op = CC_OP_DIV;
4027                         } else {
4028                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
4029                                             cpu_src2);
4030                         }
4031                         break;
4032                     case 0xf: /* sdiv */
4033                         CHECK_IU_FEATURE(dc, DIV);
4034                         if (xop & 0x10) {
4035                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
4036                                                cpu_src2);
4037                             dc->cc_op = CC_OP_DIV;
4038                         } else {
4039                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
4040                                             cpu_src2);
4041                         }
4042                         break;
4043                     default:
4044                         goto illegal_insn;
4045                     }
4046                     gen_store_gpr(dc, rd, cpu_dst);
4047                 } else {
4048                     cpu_src1 = get_src1(dc, insn);
4049                     cpu_src2 = get_src2(dc, insn);
4050                     switch (xop) {
4051                     case 0x20: /* taddcc */
4052                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4053                         gen_store_gpr(dc, rd, cpu_dst);
4054                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4055                         dc->cc_op = CC_OP_TADD;
4056                         break;
4057                     case 0x21: /* tsubcc */
4058                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4059                         gen_store_gpr(dc, rd, cpu_dst);
4060                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4061                         dc->cc_op = CC_OP_TSUB;
4062                         break;
4063                     case 0x22: /* taddcctv */
4064                         gen_helper_taddcctv(cpu_dst, tcg_env,
4065                                             cpu_src1, cpu_src2);
4066                         gen_store_gpr(dc, rd, cpu_dst);
4067                         dc->cc_op = CC_OP_TADDTV;
4068                         break;
4069                     case 0x23: /* tsubcctv */
4070                         gen_helper_tsubcctv(cpu_dst, tcg_env,
4071                                             cpu_src1, cpu_src2);
4072                         gen_store_gpr(dc, rd, cpu_dst);
4073                         dc->cc_op = CC_OP_TSUBTV;
4074                         break;
4075                     case 0x24: /* mulscc */
4076                         update_psr(dc);
4077                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4078                         gen_store_gpr(dc, rd, cpu_dst);
4079                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4080                         dc->cc_op = CC_OP_ADD;
4081                         break;
4082 #ifndef TARGET_SPARC64
4083                     case 0x25:  /* sll */
4084                         if (IS_IMM) { /* immediate */
4085                             simm = GET_FIELDs(insn, 20, 31);
4086                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4087                         } else { /* register */
4088                             cpu_tmp0 = tcg_temp_new();
4089                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4090                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4091                         }
4092                         gen_store_gpr(dc, rd, cpu_dst);
4093                         break;
4094                     case 0x26:  /* srl */
4095                         if (IS_IMM) { /* immediate */
4096                             simm = GET_FIELDs(insn, 20, 31);
4097                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4098                         } else { /* register */
4099                             cpu_tmp0 = tcg_temp_new();
4100                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4101                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4102                         }
4103                         gen_store_gpr(dc, rd, cpu_dst);
4104                         break;
4105                     case 0x27:  /* sra */
4106                         if (IS_IMM) { /* immediate */
4107                             simm = GET_FIELDs(insn, 20, 31);
4108                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4109                         } else { /* register */
4110                             cpu_tmp0 = tcg_temp_new();
4111                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4112                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4113                         }
4114                         gen_store_gpr(dc, rd, cpu_dst);
4115                         break;
4116 #endif
4117                     case 0x30:
4118                         {
4119                             cpu_tmp0 = tcg_temp_new();
4120                             switch(rd) {
4121                             case 0: /* wry */
4122                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4123                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4124                                 break;
4125 #ifndef TARGET_SPARC64
4126                             case 0x01 ... 0x0f: /* undefined in the
4127                                                    SPARCv8 manual, nop
4128                                                    on the microSPARC
4129                                                    II */
4130                             case 0x10 ... 0x1f: /* implementation-dependent
4131                                                    in the SPARCv8
4132                                                    manual, nop on the
4133                                                    microSPARC II */
4134                                 if ((rd == 0x13) && (dc->def->features &
4135                                                      CPU_FEATURE_POWERDOWN)) {
4136                                     /* LEON3 power-down */
4137                                     save_state(dc);
4138                                     gen_helper_power_down(tcg_env);
4139                                 }
4140                                 break;
4141 #else
4142                             case 0x2: /* V9 wrccr */
4143                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4144                                 gen_helper_wrccr(tcg_env, cpu_tmp0);
4145                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4146                                 dc->cc_op = CC_OP_FLAGS;
4147                                 break;
4148                             case 0x3: /* V9 wrasi */
4149                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4150                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4151                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4152                                                 offsetof(CPUSPARCState, asi));
4153                                 /*
4154                                  * End TB to notice changed ASI.
4155                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4156                                  * update DisasContext and not exit the TB.
4157                                  */
4158                                 save_state(dc);
4159                                 gen_op_next_insn();
4160                                 tcg_gen_lookup_and_goto_ptr();
4161                                 dc->base.is_jmp = DISAS_NORETURN;
4162                                 break;
4163                             case 0x6: /* V9 wrfprs */
4164                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4165                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4166                                 dc->fprs_dirty = 0;
4167                                 save_state(dc);
4168                                 gen_op_next_insn();
4169                                 tcg_gen_exit_tb(NULL, 0);
4170                                 dc->base.is_jmp = DISAS_NORETURN;
4171                                 break;
4172                             case 0xf: /* V9 sir, nop if user */
4173 #if !defined(CONFIG_USER_ONLY)
4174                                 if (supervisor(dc)) {
4175                                     ; // XXX
4176                                 }
4177 #endif
4178                                 break;
4179                             case 0x13: /* Graphics Status */
4180                                 if (gen_trap_ifnofpu(dc)) {
4181                                     goto jmp_insn;
4182                                 }
4183                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4184                                 break;
4185                             case 0x14: /* Softint set */
4186                                 if (!supervisor(dc))
4187                                     goto illegal_insn;
4188                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4189                                 gen_helper_set_softint(tcg_env, cpu_tmp0);
4190                                 break;
4191                             case 0x15: /* Softint clear */
4192                                 if (!supervisor(dc))
4193                                     goto illegal_insn;
4194                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4195                                 gen_helper_clear_softint(tcg_env, cpu_tmp0);
4196                                 break;
4197                             case 0x16: /* Softint write */
4198                                 if (!supervisor(dc))
4199                                     goto illegal_insn;
4200                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4201                                 gen_helper_write_softint(tcg_env, cpu_tmp0);
4202                                 break;
4203                             case 0x17: /* Tick compare */
4204 #if !defined(CONFIG_USER_ONLY)
4205                                 if (!supervisor(dc))
4206                                     goto illegal_insn;
4207 #endif
4208                                 {
4209                                     TCGv_ptr r_tickptr;
4210 
4211                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4212                                                    cpu_src2);
4213                                     r_tickptr = tcg_temp_new_ptr();
4214                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4215                                                    offsetof(CPUSPARCState, tick));
4216                                     translator_io_start(&dc->base);
4217                                     gen_helper_tick_set_limit(r_tickptr,
4218                                                               cpu_tick_cmpr);
4219                                     /* End TB to handle timer interrupt */
4220                                     dc->base.is_jmp = DISAS_EXIT;
4221                                 }
4222                                 break;
4223                             case 0x18: /* System tick */
4224 #if !defined(CONFIG_USER_ONLY)
4225                                 if (!supervisor(dc))
4226                                     goto illegal_insn;
4227 #endif
4228                                 {
4229                                     TCGv_ptr r_tickptr;
4230 
4231                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4232                                                    cpu_src2);
4233                                     r_tickptr = tcg_temp_new_ptr();
4234                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4235                                                    offsetof(CPUSPARCState, stick));
4236                                     translator_io_start(&dc->base);
4237                                     gen_helper_tick_set_count(r_tickptr,
4238                                                               cpu_tmp0);
4239                                     /* End TB to handle timer interrupt */
4240                                     dc->base.is_jmp = DISAS_EXIT;
4241                                 }
4242                                 break;
4243                             case 0x19: /* System tick compare */
4244 #if !defined(CONFIG_USER_ONLY)
4245                                 if (!supervisor(dc))
4246                                     goto illegal_insn;
4247 #endif
4248                                 {
4249                                     TCGv_ptr r_tickptr;
4250 
4251                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4252                                                    cpu_src2);
4253                                     r_tickptr = tcg_temp_new_ptr();
4254                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4255                                                    offsetof(CPUSPARCState, stick));
4256                                     translator_io_start(&dc->base);
4257                                     gen_helper_tick_set_limit(r_tickptr,
4258                                                               cpu_stick_cmpr);
4259                                     /* End TB to handle timer interrupt */
4260                                     dc->base.is_jmp = DISAS_EXIT;
4261                                 }
4262                                 break;
4263 
4264                             case 0x10: /* Performance Control */
4265                             case 0x11: /* Performance Instrumentation
4266                                           Counter */
4267                             case 0x12: /* Dispatch Control */
4268 #endif
4269                             default:
4270                                 goto illegal_insn;
4271                             }
4272                         }
4273                         break;
4274 #if !defined(CONFIG_USER_ONLY)
4275                     case 0x31: /* wrpsr, V9 saved, restored */
4276                         {
4277                             if (!supervisor(dc))
4278                                 goto priv_insn;
4279 #ifdef TARGET_SPARC64
4280                             switch (rd) {
4281                             case 0:
4282                                 gen_helper_saved(tcg_env);
4283                                 break;
4284                             case 1:
4285                                 gen_helper_restored(tcg_env);
4286                                 break;
4287                             case 2: /* UA2005 allclean */
4288                             case 3: /* UA2005 otherw */
4289                             case 4: /* UA2005 normalw */
4290                             case 5: /* UA2005 invalw */
4291                                 // XXX
4292                             default:
4293                                 goto illegal_insn;
4294                             }
4295 #else
4296                             cpu_tmp0 = tcg_temp_new();
4297                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4298                             gen_helper_wrpsr(tcg_env, cpu_tmp0);
4299                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4300                             dc->cc_op = CC_OP_FLAGS;
4301                             save_state(dc);
4302                             gen_op_next_insn();
4303                             tcg_gen_exit_tb(NULL, 0);
4304                             dc->base.is_jmp = DISAS_NORETURN;
4305 #endif
4306                         }
4307                         break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            /* Privileged; the architectural value written is
                               rs1 XOR operand2 (register or simm13).  On V9
                               this is wrpr and rd selects the privileged
                               register; on V8 it writes the WIM.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    /* Store into the trap state for the
                                       current trap level.  */
                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    /* tt is a 32-bit field; store truncated.  */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* Setting the tick counter touches timer
                                       (I/O) state, so mark the insn as I/O
                                       and leave the TB afterwards.  */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* PSTATE changes can affect how code is
                                   translated; sync state, let the helper do
                                   the write, and continue at a dynamic npc so
                                   the translator is re-entered.  */
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                /* Changing TL switches the active trap-state
                                   frame; force a dynamic npc as above.  */
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* PIL gates interrupt delivery: I/O insn;
                                   exit the TB so pending irqs are seen.  */
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                /* Hypervisor-only on top of the supervisor
                                   check above.  */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrwim: keep only the bits for the windows
                               this CPU model actually implements.  */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8: write the Trap Base Register with
                               rs1 XOR operand2 (supervisor only).  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hyperprivileged register write,
                               selected by rd; requires the HYPV feature and
                               hypervisor mode.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* HPSTATE affects execution mode; end the TB
                                   and return to the main loop.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    /* Re-arming the hstick comparator is an
                                       I/O operation.  */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
4497 #endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 set: integer condition codes, where
                               cc field 0 selects %icc and 2 selects %xcc
                               (1 and 3 are reserved).  Bit 18 clear:
                               floating-point %fcc[cc].  */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* Conditional move: src2 if the condition holds,
                               otherwise keep rd's current value.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; exceptional cases (e.g. zero
                           divisor) are handled inside the helper — NOTE(review):
                           confirm against helper_sdivx.  */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of the second operand only.  */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Move if rs1 satisfies the register condition
                               (a comparison of rs1 against zero).  */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4564 #endif
4565                     default:
4566                         goto illegal_insn;
4567                     }
4568                 }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                /* VIS instructions dispatch on the 9-bit opf field; rs1/rs2
                   are re-extracted here from the insn word.  All of them
                   require the FPU to be enabled: gen_trap_ifnofpu() returns
                   true when it has emitted an fp-disabled trap, in which case
                   translation of this insn is skipped.  */
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

4577 
4578                 switch (opf) {
4579                 case 0x000: /* VIS I edge8cc */
4580                     CHECK_FPU_FEATURE(dc, VIS1);
4581                     cpu_src1 = gen_load_gpr(dc, rs1);
4582                     cpu_src2 = gen_load_gpr(dc, rs2);
4583                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4584                     gen_store_gpr(dc, rd, cpu_dst);
4585                     break;
4586                 case 0x001: /* VIS II edge8n */
4587                     CHECK_FPU_FEATURE(dc, VIS2);
4588                     cpu_src1 = gen_load_gpr(dc, rs1);
4589                     cpu_src2 = gen_load_gpr(dc, rs2);
4590                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4591                     gen_store_gpr(dc, rd, cpu_dst);
4592                     break;
4593                 case 0x002: /* VIS I edge8lcc */
4594                     CHECK_FPU_FEATURE(dc, VIS1);
4595                     cpu_src1 = gen_load_gpr(dc, rs1);
4596                     cpu_src2 = gen_load_gpr(dc, rs2);
4597                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4598                     gen_store_gpr(dc, rd, cpu_dst);
4599                     break;
4600                 case 0x003: /* VIS II edge8ln */
4601                     CHECK_FPU_FEATURE(dc, VIS2);
4602                     cpu_src1 = gen_load_gpr(dc, rs1);
4603                     cpu_src2 = gen_load_gpr(dc, rs2);
4604                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4605                     gen_store_gpr(dc, rd, cpu_dst);
4606                     break;
4607                 case 0x004: /* VIS I edge16cc */
4608                     CHECK_FPU_FEATURE(dc, VIS1);
4609                     cpu_src1 = gen_load_gpr(dc, rs1);
4610                     cpu_src2 = gen_load_gpr(dc, rs2);
4611                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4612                     gen_store_gpr(dc, rd, cpu_dst);
4613                     break;
4614                 case 0x005: /* VIS II edge16n */
4615                     CHECK_FPU_FEATURE(dc, VIS2);
4616                     cpu_src1 = gen_load_gpr(dc, rs1);
4617                     cpu_src2 = gen_load_gpr(dc, rs2);
4618                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4619                     gen_store_gpr(dc, rd, cpu_dst);
4620                     break;
4621                 case 0x006: /* VIS I edge16lcc */
4622                     CHECK_FPU_FEATURE(dc, VIS1);
4623                     cpu_src1 = gen_load_gpr(dc, rs1);
4624                     cpu_src2 = gen_load_gpr(dc, rs2);
4625                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4626                     gen_store_gpr(dc, rd, cpu_dst);
4627                     break;
4628                 case 0x007: /* VIS II edge16ln */
4629                     CHECK_FPU_FEATURE(dc, VIS2);
4630                     cpu_src1 = gen_load_gpr(dc, rs1);
4631                     cpu_src2 = gen_load_gpr(dc, rs2);
4632                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4633                     gen_store_gpr(dc, rd, cpu_dst);
4634                     break;
4635                 case 0x008: /* VIS I edge32cc */
4636                     CHECK_FPU_FEATURE(dc, VIS1);
4637                     cpu_src1 = gen_load_gpr(dc, rs1);
4638                     cpu_src2 = gen_load_gpr(dc, rs2);
4639                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4640                     gen_store_gpr(dc, rd, cpu_dst);
4641                     break;
4642                 case 0x009: /* VIS II edge32n */
4643                     CHECK_FPU_FEATURE(dc, VIS2);
4644                     cpu_src1 = gen_load_gpr(dc, rs1);
4645                     cpu_src2 = gen_load_gpr(dc, rs2);
4646                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4647                     gen_store_gpr(dc, rd, cpu_dst);
4648                     break;
4649                 case 0x00a: /* VIS I edge32lcc */
4650                     CHECK_FPU_FEATURE(dc, VIS1);
4651                     cpu_src1 = gen_load_gpr(dc, rs1);
4652                     cpu_src2 = gen_load_gpr(dc, rs2);
4653                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4654                     gen_store_gpr(dc, rd, cpu_dst);
4655                     break;
4656                 case 0x00b: /* VIS II edge32ln */
4657                     CHECK_FPU_FEATURE(dc, VIS2);
4658                     cpu_src1 = gen_load_gpr(dc, rs1);
4659                     cpu_src2 = gen_load_gpr(dc, rs2);
4660                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4661                     gen_store_gpr(dc, rd, cpu_dst);
4662                     break;
4663                 case 0x010: /* VIS I array8 */
4664                     CHECK_FPU_FEATURE(dc, VIS1);
4665                     cpu_src1 = gen_load_gpr(dc, rs1);
4666                     cpu_src2 = gen_load_gpr(dc, rs2);
4667                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4668                     gen_store_gpr(dc, rd, cpu_dst);
4669                     break;
4670                 case 0x012: /* VIS I array16 */
4671                     CHECK_FPU_FEATURE(dc, VIS1);
4672                     cpu_src1 = gen_load_gpr(dc, rs1);
4673                     cpu_src2 = gen_load_gpr(dc, rs2);
4674                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4675                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4676                     gen_store_gpr(dc, rd, cpu_dst);
4677                     break;
4678                 case 0x014: /* VIS I array32 */
4679                     CHECK_FPU_FEATURE(dc, VIS1);
4680                     cpu_src1 = gen_load_gpr(dc, rs1);
4681                     cpu_src2 = gen_load_gpr(dc, rs2);
4682                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4683                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4684                     gen_store_gpr(dc, rd, cpu_dst);
4685                     break;
4686                 case 0x018: /* VIS I alignaddr */
4687                     CHECK_FPU_FEATURE(dc, VIS1);
4688                     cpu_src1 = gen_load_gpr(dc, rs1);
4689                     cpu_src2 = gen_load_gpr(dc, rs2);
4690                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4691                     gen_store_gpr(dc, rd, cpu_dst);
4692                     break;
4693                 case 0x01a: /* VIS I alignaddrl */
4694                     CHECK_FPU_FEATURE(dc, VIS1);
4695                     cpu_src1 = gen_load_gpr(dc, rs1);
4696                     cpu_src2 = gen_load_gpr(dc, rs2);
4697                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4698                     gen_store_gpr(dc, rd, cpu_dst);
4699                     break;
4700                 case 0x019: /* VIS II bmask */
4701                     CHECK_FPU_FEATURE(dc, VIS2);
4702                     cpu_src1 = gen_load_gpr(dc, rs1);
4703                     cpu_src2 = gen_load_gpr(dc, rs2);
4704                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4705                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4706                     gen_store_gpr(dc, rd, cpu_dst);
4707                     break;
4708                 case 0x020: /* VIS I fcmple16 */
4709                     CHECK_FPU_FEATURE(dc, VIS1);
4710                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4711                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4712                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4713                     gen_store_gpr(dc, rd, cpu_dst);
4714                     break;
4715                 case 0x022: /* VIS I fcmpne16 */
4716                     CHECK_FPU_FEATURE(dc, VIS1);
4717                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4718                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4719                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4720                     gen_store_gpr(dc, rd, cpu_dst);
4721                     break;
4722                 case 0x024: /* VIS I fcmple32 */
4723                     CHECK_FPU_FEATURE(dc, VIS1);
4724                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4725                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4726                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4727                     gen_store_gpr(dc, rd, cpu_dst);
4728                     break;
4729                 case 0x026: /* VIS I fcmpne32 */
4730                     CHECK_FPU_FEATURE(dc, VIS1);
4731                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4732                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4733                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4734                     gen_store_gpr(dc, rd, cpu_dst);
4735                     break;
4736                 case 0x028: /* VIS I fcmpgt16 */
4737                     CHECK_FPU_FEATURE(dc, VIS1);
4738                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4739                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4740                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4741                     gen_store_gpr(dc, rd, cpu_dst);
4742                     break;
4743                 case 0x02a: /* VIS I fcmpeq16 */
4744                     CHECK_FPU_FEATURE(dc, VIS1);
4745                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4746                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4747                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4748                     gen_store_gpr(dc, rd, cpu_dst);
4749                     break;
4750                 case 0x02c: /* VIS I fcmpgt32 */
4751                     CHECK_FPU_FEATURE(dc, VIS1);
4752                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4753                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4754                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4755                     gen_store_gpr(dc, rd, cpu_dst);
4756                     break;
4757                 case 0x02e: /* VIS I fcmpeq32 */
4758                     CHECK_FPU_FEATURE(dc, VIS1);
4759                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4760                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4761                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4762                     gen_store_gpr(dc, rd, cpu_dst);
4763                     break;
4764                 case 0x031: /* VIS I fmul8x16 */
4765                     CHECK_FPU_FEATURE(dc, VIS1);
4766                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4767                     break;
4768                 case 0x033: /* VIS I fmul8x16au */
4769                     CHECK_FPU_FEATURE(dc, VIS1);
4770                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4771                     break;
4772                 case 0x035: /* VIS I fmul8x16al */
4773                     CHECK_FPU_FEATURE(dc, VIS1);
4774                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4775                     break;
4776                 case 0x036: /* VIS I fmul8sux16 */
4777                     CHECK_FPU_FEATURE(dc, VIS1);
4778                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4779                     break;
4780                 case 0x037: /* VIS I fmul8ulx16 */
4781                     CHECK_FPU_FEATURE(dc, VIS1);
4782                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4783                     break;
4784                 case 0x038: /* VIS I fmuld8sux16 */
4785                     CHECK_FPU_FEATURE(dc, VIS1);
4786                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4787                     break;
4788                 case 0x039: /* VIS I fmuld8ulx16 */
4789                     CHECK_FPU_FEATURE(dc, VIS1);
4790                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4791                     break;
4792                 case 0x03a: /* VIS I fpack32 */
4793                     CHECK_FPU_FEATURE(dc, VIS1);
4794                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4795                     break;
4796                 case 0x03b: /* VIS I fpack16 */
4797                     CHECK_FPU_FEATURE(dc, VIS1);
4798                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4799                     cpu_dst_32 = gen_dest_fpr_F(dc);
4800                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4801                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4802                     break;
4803                 case 0x03d: /* VIS I fpackfix */
4804                     CHECK_FPU_FEATURE(dc, VIS1);
4805                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4806                     cpu_dst_32 = gen_dest_fpr_F(dc);
4807                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4808                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4809                     break;
4810                 case 0x03e: /* VIS I pdist */
4811                     CHECK_FPU_FEATURE(dc, VIS1);
4812                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4813                     break;
4814                 case 0x048: /* VIS I faligndata */
4815                     CHECK_FPU_FEATURE(dc, VIS1);
4816                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4817                     break;
4818                 case 0x04b: /* VIS I fpmerge */
4819                     CHECK_FPU_FEATURE(dc, VIS1);
4820                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4821                     break;
4822                 case 0x04c: /* VIS II bshuffle */
4823                     CHECK_FPU_FEATURE(dc, VIS2);
4824                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4825                     break;
4826                 case 0x04d: /* VIS I fexpand */
4827                     CHECK_FPU_FEATURE(dc, VIS1);
4828                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4829                     break;
4830                 case 0x050: /* VIS I fpadd16 */
4831                     CHECK_FPU_FEATURE(dc, VIS1);
4832                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4833                     break;
4834                 case 0x051: /* VIS I fpadd16s */
4835                     CHECK_FPU_FEATURE(dc, VIS1);
4836                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4837                     break;
4838                 case 0x052: /* VIS I fpadd32 */
4839                     CHECK_FPU_FEATURE(dc, VIS1);
4840                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4841                     break;
4842                 case 0x053: /* VIS I fpadd32s */
4843                     CHECK_FPU_FEATURE(dc, VIS1);
4844                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4845                     break;
4846                 case 0x054: /* VIS I fpsub16 */
4847                     CHECK_FPU_FEATURE(dc, VIS1);
4848                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4849                     break;
4850                 case 0x055: /* VIS I fpsub16s */
4851                     CHECK_FPU_FEATURE(dc, VIS1);
4852                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4853                     break;
4854                 case 0x056: /* VIS I fpsub32 */
4855                     CHECK_FPU_FEATURE(dc, VIS1);
4856                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4857                     break;
4858                 case 0x057: /* VIS I fpsub32s */
4859                     CHECK_FPU_FEATURE(dc, VIS1);
4860                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4861                     break;
4862                 case 0x060: /* VIS I fzero */
4863                     CHECK_FPU_FEATURE(dc, VIS1);
4864                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4865                     tcg_gen_movi_i64(cpu_dst_64, 0);
4866                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4867                     break;
4868                 case 0x061: /* VIS I fzeros */
4869                     CHECK_FPU_FEATURE(dc, VIS1);
4870                     cpu_dst_32 = gen_dest_fpr_F(dc);
4871                     tcg_gen_movi_i32(cpu_dst_32, 0);
4872                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4873                     break;
4874                 case 0x062: /* VIS I fnor */
4875                     CHECK_FPU_FEATURE(dc, VIS1);
4876                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4877                     break;
4878                 case 0x063: /* VIS I fnors */
4879                     CHECK_FPU_FEATURE(dc, VIS1);
4880                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4881                     break;
4882                 case 0x064: /* VIS I fandnot2 */
4883                     CHECK_FPU_FEATURE(dc, VIS1);
4884                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4885                     break;
4886                 case 0x065: /* VIS I fandnot2s */
4887                     CHECK_FPU_FEATURE(dc, VIS1);
4888                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4889                     break;
4890                 case 0x066: /* VIS I fnot2 */
4891                     CHECK_FPU_FEATURE(dc, VIS1);
4892                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4893                     break;
4894                 case 0x067: /* VIS I fnot2s */
4895                     CHECK_FPU_FEATURE(dc, VIS1);
4896                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4897                     break;
4898                 case 0x068: /* VIS I fandnot1 */
4899                     CHECK_FPU_FEATURE(dc, VIS1);
4900                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4901                     break;
4902                 case 0x069: /* VIS I fandnot1s */
4903                     CHECK_FPU_FEATURE(dc, VIS1);
4904                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4905                     break;
4906                 case 0x06a: /* VIS I fnot1 */
4907                     CHECK_FPU_FEATURE(dc, VIS1);
4908                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4909                     break;
4910                 case 0x06b: /* VIS I fnot1s */
4911                     CHECK_FPU_FEATURE(dc, VIS1);
4912                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4913                     break;
4914                 case 0x06c: /* VIS I fxor */
4915                     CHECK_FPU_FEATURE(dc, VIS1);
4916                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4917                     break;
4918                 case 0x06d: /* VIS I fxors */
4919                     CHECK_FPU_FEATURE(dc, VIS1);
4920                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4921                     break;
4922                 case 0x06e: /* VIS I fnand */
4923                     CHECK_FPU_FEATURE(dc, VIS1);
4924                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4925                     break;
4926                 case 0x06f: /* VIS I fnands */
4927                     CHECK_FPU_FEATURE(dc, VIS1);
4928                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4929                     break;
4930                 case 0x070: /* VIS I fand */
4931                     CHECK_FPU_FEATURE(dc, VIS1);
4932                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4933                     break;
4934                 case 0x071: /* VIS I fands */
4935                     CHECK_FPU_FEATURE(dc, VIS1);
4936                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4937                     break;
4938                 case 0x072: /* VIS I fxnor */
4939                     CHECK_FPU_FEATURE(dc, VIS1);
4940                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4941                     break;
4942                 case 0x073: /* VIS I fxnors */
4943                     CHECK_FPU_FEATURE(dc, VIS1);
4944                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4945                     break;
4946                 case 0x074: /* VIS I fsrc1 */
4947                     CHECK_FPU_FEATURE(dc, VIS1);
4948                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4949                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4950                     break;
4951                 case 0x075: /* VIS I fsrc1s */
4952                     CHECK_FPU_FEATURE(dc, VIS1);
4953                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4954                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4955                     break;
4956                 case 0x076: /* VIS I fornot2 */
4957                     CHECK_FPU_FEATURE(dc, VIS1);
4958                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4959                     break;
4960                 case 0x077: /* VIS I fornot2s */
4961                     CHECK_FPU_FEATURE(dc, VIS1);
4962                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4963                     break;
4964                 case 0x078: /* VIS I fsrc2 */
4965                     CHECK_FPU_FEATURE(dc, VIS1);
4966                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4967                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4968                     break;
4969                 case 0x079: /* VIS I fsrc2s */
4970                     CHECK_FPU_FEATURE(dc, VIS1);
4971                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4972                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4973                     break;
4974                 case 0x07a: /* VIS I fornot1 */
4975                     CHECK_FPU_FEATURE(dc, VIS1);
4976                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4977                     break;
4978                 case 0x07b: /* VIS I fornot1s */
4979                     CHECK_FPU_FEATURE(dc, VIS1);
4980                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4981                     break;
4982                 case 0x07c: /* VIS I for */
4983                     CHECK_FPU_FEATURE(dc, VIS1);
4984                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4985                     break;
4986                 case 0x07d: /* VIS I fors */
4987                     CHECK_FPU_FEATURE(dc, VIS1);
4988                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4989                     break;
4990                 case 0x07e: /* VIS I fone */
4991                     CHECK_FPU_FEATURE(dc, VIS1);
4992                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4993                     tcg_gen_movi_i64(cpu_dst_64, -1);
4994                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4995                     break;
4996                 case 0x07f: /* VIS I fones */
4997                     CHECK_FPU_FEATURE(dc, VIS1);
4998                     cpu_dst_32 = gen_dest_fpr_F(dc);
4999                     tcg_gen_movi_i32(cpu_dst_32, -1);
5000                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5001                     break;
5002                 case 0x080: /* VIS I shutdown */
5003                 case 0x081: /* VIS II siam */
5004                     // XXX
5005                     goto illegal_insn;
5006                 default:
5007                     goto illegal_insn;
5008                 }
5009 #else
5010                 goto ncp_insn;
5011 #endif
5012             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5013 #ifdef TARGET_SPARC64
5014                 goto illegal_insn;
5015 #else
5016                 goto ncp_insn;
5017 #endif
5018 #ifdef TARGET_SPARC64
5019             } else if (xop == 0x39) { /* V9 return */
5020                 save_state(dc);
5021                 cpu_src1 = get_src1(dc, insn);
5022                 cpu_tmp0 = tcg_temp_new();
5023                 if (IS_IMM) {   /* immediate */
5024                     simm = GET_FIELDs(insn, 19, 31);
5025                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5026                 } else {                /* register */
5027                     rs2 = GET_FIELD(insn, 27, 31);
5028                     if (rs2) {
5029                         cpu_src2 = gen_load_gpr(dc, rs2);
5030                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5031                     } else {
5032                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5033                     }
5034                 }
5035                 gen_check_align(dc, cpu_tmp0, 3);
5036                 gen_helper_restore(tcg_env);
5037                 gen_mov_pc_npc(dc);
5038                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5039                 dc->npc = DYNAMIC_PC_LOOKUP;
5040                 goto jmp_insn;
5041 #endif
5042             } else {
5043                 cpu_src1 = get_src1(dc, insn);
5044                 cpu_tmp0 = tcg_temp_new();
5045                 if (IS_IMM) {   /* immediate */
5046                     simm = GET_FIELDs(insn, 19, 31);
5047                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5048                 } else {                /* register */
5049                     rs2 = GET_FIELD(insn, 27, 31);
5050                     if (rs2) {
5051                         cpu_src2 = gen_load_gpr(dc, rs2);
5052                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5053                     } else {
5054                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5055                     }
5056                 }
5057                 switch (xop) {
5058                 case 0x38:      /* jmpl */
5059                     {
5060                         gen_check_align(dc, cpu_tmp0, 3);
5061                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5062                         gen_mov_pc_npc(dc);
5063                         gen_address_mask(dc, cpu_tmp0);
5064                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5065                         dc->npc = DYNAMIC_PC_LOOKUP;
5066                     }
5067                     goto jmp_insn;
5068 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5069                 case 0x39:      /* rett, V9 return */
5070                     {
5071                         if (!supervisor(dc))
5072                             goto priv_insn;
5073                         gen_check_align(dc, cpu_tmp0, 3);
5074                         gen_mov_pc_npc(dc);
5075                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5076                         dc->npc = DYNAMIC_PC;
5077                         gen_helper_rett(tcg_env);
5078                     }
5079                     goto jmp_insn;
5080 #endif
5081                 case 0x3b: /* flush */
5082                     /* nop */
5083                     break;
5084                 case 0x3c:      /* save */
5085                     gen_helper_save(tcg_env);
5086                     gen_store_gpr(dc, rd, cpu_tmp0);
5087                     break;
5088                 case 0x3d:      /* restore */
5089                     gen_helper_restore(tcg_env);
5090                     gen_store_gpr(dc, rd, cpu_tmp0);
5091                     break;
5092 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5093                 case 0x3e:      /* V9 done/retry */
5094                     {
5095                         switch (rd) {
5096                         case 0:
5097                             if (!supervisor(dc))
5098                                 goto priv_insn;
5099                             dc->npc = DYNAMIC_PC;
5100                             dc->pc = DYNAMIC_PC;
5101                             translator_io_start(&dc->base);
5102                             gen_helper_done(tcg_env);
5103                             goto jmp_insn;
5104                         case 1:
5105                             if (!supervisor(dc))
5106                                 goto priv_insn;
5107                             dc->npc = DYNAMIC_PC;
5108                             dc->pc = DYNAMIC_PC;
5109                             translator_io_start(&dc->base);
5110                             gen_helper_retry(tcg_env);
5111                             goto jmp_insn;
5112                         default:
5113                             goto illegal_insn;
5114                         }
5115                     }
5116                     break;
5117 #endif
5118                 default:
5119                     goto illegal_insn;
5120                 }
5121             }
5122             break;
5123         }
5124         break;
5125     case 3:                     /* load/store instructions */
5126         {
5127             unsigned int xop = GET_FIELD(insn, 7, 12);
5128             /* ??? gen_address_mask prevents us from using a source
5129                register directly.  Always generate a temporary.  */
5130             TCGv cpu_addr = tcg_temp_new();
5131 
5132             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5133             if (xop == 0x3c || xop == 0x3e) {
5134                 /* V9 casa/casxa : no offset */
5135             } else if (IS_IMM) {     /* immediate */
5136                 simm = GET_FIELDs(insn, 19, 31);
5137                 if (simm != 0) {
5138                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5139                 }
5140             } else {            /* register */
5141                 rs2 = GET_FIELD(insn, 27, 31);
5142                 if (rs2 != 0) {
5143                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5144                 }
5145             }
5146             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5147                 (xop > 0x17 && xop <= 0x1d ) ||
5148                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5149                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5150 
5151                 switch (xop) {
5152                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5153                     gen_address_mask(dc, cpu_addr);
5154                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5155                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5156                     break;
5157                 case 0x1:       /* ldub, load unsigned byte */
5158                     gen_address_mask(dc, cpu_addr);
5159                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5160                                        dc->mem_idx, MO_UB);
5161                     break;
5162                 case 0x2:       /* lduh, load unsigned halfword */
5163                     gen_address_mask(dc, cpu_addr);
5164                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5165                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5166                     break;
5167                 case 0x3:       /* ldd, load double word */
5168                     if (rd & 1)
5169                         goto illegal_insn;
5170                     else {
5171                         TCGv_i64 t64;
5172 
5173                         gen_address_mask(dc, cpu_addr);
5174                         t64 = tcg_temp_new_i64();
5175                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5176                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5177                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5178                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5179                         gen_store_gpr(dc, rd + 1, cpu_val);
5180                         tcg_gen_shri_i64(t64, t64, 32);
5181                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5182                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5183                     }
5184                     break;
5185                 case 0x9:       /* ldsb, load signed byte */
5186                     gen_address_mask(dc, cpu_addr);
5187                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5188                     break;
5189                 case 0xa:       /* ldsh, load signed halfword */
5190                     gen_address_mask(dc, cpu_addr);
5191                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5192                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5193                     break;
5194                 case 0xd:       /* ldstub */
5195                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5196                     break;
5197                 case 0x0f:
5198                     /* swap, swap register with memory. Also atomically */
5199                     cpu_src1 = gen_load_gpr(dc, rd);
5200                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5201                              dc->mem_idx, MO_TEUL);
5202                     break;
5203 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5204                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5205                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5206                     break;
5207                 case 0x11:      /* lduba, load unsigned byte alternate */
5208                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5209                     break;
5210                 case 0x12:      /* lduha, load unsigned halfword alternate */
5211                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5212                     break;
5213                 case 0x13:      /* ldda, load double word alternate */
5214                     if (rd & 1) {
5215                         goto illegal_insn;
5216                     }
5217                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5218                     goto skip_move;
5219                 case 0x19:      /* ldsba, load signed byte alternate */
5220                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5221                     break;
5222                 case 0x1a:      /* ldsha, load signed halfword alternate */
5223                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5224                     break;
5225                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5226                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5227                     break;
5228                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5229                                    atomically */
5230                     cpu_src1 = gen_load_gpr(dc, rd);
5231                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5232                     break;
5233 
5234 #ifndef TARGET_SPARC64
5235                 case 0x30: /* ldc */
5236                 case 0x31: /* ldcsr */
5237                 case 0x33: /* lddc */
5238                     goto ncp_insn;
5239 #endif
5240 #endif
5241 #ifdef TARGET_SPARC64
5242                 case 0x08: /* V9 ldsw */
5243                     gen_address_mask(dc, cpu_addr);
5244                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5245                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5246                     break;
5247                 case 0x0b: /* V9 ldx */
5248                     gen_address_mask(dc, cpu_addr);
5249                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5250                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5251                     break;
5252                 case 0x18: /* V9 ldswa */
5253                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5254                     break;
5255                 case 0x1b: /* V9 ldxa */
5256                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5257                     break;
5258                 case 0x2d: /* V9 prefetch, no effect */
5259                     goto skip_move;
5260                 case 0x30: /* V9 ldfa */
5261                     if (gen_trap_ifnofpu(dc)) {
5262                         goto jmp_insn;
5263                     }
5264                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5265                     gen_update_fprs_dirty(dc, rd);
5266                     goto skip_move;
5267                 case 0x33: /* V9 lddfa */
5268                     if (gen_trap_ifnofpu(dc)) {
5269                         goto jmp_insn;
5270                     }
5271                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5272                     gen_update_fprs_dirty(dc, DFPREG(rd));
5273                     goto skip_move;
5274                 case 0x3d: /* V9 prefetcha, no effect */
5275                     goto skip_move;
5276                 case 0x32: /* V9 ldqfa */
5277                     CHECK_FPU_FEATURE(dc, FLOAT128);
5278                     if (gen_trap_ifnofpu(dc)) {
5279                         goto jmp_insn;
5280                     }
5281                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5282                     gen_update_fprs_dirty(dc, QFPREG(rd));
5283                     goto skip_move;
5284 #endif
5285                 default:
5286                     goto illegal_insn;
5287                 }
5288                 gen_store_gpr(dc, rd, cpu_val);
5289 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5290             skip_move: ;
5291 #endif
5292             } else if (xop >= 0x20 && xop < 0x24) {
5293                 if (gen_trap_ifnofpu(dc)) {
5294                     goto jmp_insn;
5295                 }
5296                 switch (xop) {
5297                 case 0x20:      /* ldf, load fpreg */
5298                     gen_address_mask(dc, cpu_addr);
5299                     cpu_dst_32 = gen_dest_fpr_F(dc);
5300                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5301                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5302                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5303                     break;
5304                 case 0x21:      /* ldfsr, V9 ldxfsr */
5305 #ifdef TARGET_SPARC64
5306                     gen_address_mask(dc, cpu_addr);
5307                     if (rd == 1) {
5308                         TCGv_i64 t64 = tcg_temp_new_i64();
5309                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5310                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5311                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5312                         break;
5313                     }
5314 #endif
5315                     cpu_dst_32 = tcg_temp_new_i32();
5316                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5317                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5318                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5319                     break;
5320                 case 0x22:      /* ldqf, load quad fpreg */
5321                     CHECK_FPU_FEATURE(dc, FLOAT128);
5322                     gen_address_mask(dc, cpu_addr);
5323                     cpu_src1_64 = tcg_temp_new_i64();
5324                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5325                                         MO_TEUQ | MO_ALIGN_4);
5326                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5327                     cpu_src2_64 = tcg_temp_new_i64();
5328                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5329                                         MO_TEUQ | MO_ALIGN_4);
5330                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5331                     break;
5332                 case 0x23:      /* lddf, load double fpreg */
5333                     gen_address_mask(dc, cpu_addr);
5334                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5335                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5336                                         MO_TEUQ | MO_ALIGN_4);
5337                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5338                     break;
5339                 default:
5340                     goto illegal_insn;
5341                 }
5342             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5343                        xop == 0xe || xop == 0x1e) {
5344                 TCGv cpu_val = gen_load_gpr(dc, rd);
5345 
5346                 switch (xop) {
5347                 case 0x4: /* st, store word */
5348                     gen_address_mask(dc, cpu_addr);
5349                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5350                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5351                     break;
5352                 case 0x5: /* stb, store byte */
5353                     gen_address_mask(dc, cpu_addr);
5354                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5355                     break;
5356                 case 0x6: /* sth, store halfword */
5357                     gen_address_mask(dc, cpu_addr);
5358                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5359                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5360                     break;
5361                 case 0x7: /* std, store double word */
5362                     if (rd & 1)
5363                         goto illegal_insn;
5364                     else {
5365                         TCGv_i64 t64;
5366                         TCGv lo;
5367 
5368                         gen_address_mask(dc, cpu_addr);
5369                         lo = gen_load_gpr(dc, rd + 1);
5370                         t64 = tcg_temp_new_i64();
5371                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5372                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5373                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5374                     }
5375                     break;
5376 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5377                 case 0x14: /* sta, V9 stwa, store word alternate */
5378                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5379                     break;
5380                 case 0x15: /* stba, store byte alternate */
5381                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5382                     break;
5383                 case 0x16: /* stha, store halfword alternate */
5384                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5385                     break;
5386                 case 0x17: /* stda, store double word alternate */
5387                     if (rd & 1) {
5388                         goto illegal_insn;
5389                     }
5390                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5391                     break;
5392 #endif
5393 #ifdef TARGET_SPARC64
5394                 case 0x0e: /* V9 stx */
5395                     gen_address_mask(dc, cpu_addr);
5396                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5397                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5398                     break;
5399                 case 0x1e: /* V9 stxa */
5400                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5401                     break;
5402 #endif
5403                 default:
5404                     goto illegal_insn;
5405                 }
5406             } else if (xop > 0x23 && xop < 0x28) {
5407                 if (gen_trap_ifnofpu(dc)) {
5408                     goto jmp_insn;
5409                 }
5410                 switch (xop) {
5411                 case 0x24: /* stf, store fpreg */
5412                     gen_address_mask(dc, cpu_addr);
5413                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5414                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5415                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5416                     break;
5417                 case 0x25: /* stfsr, V9 stxfsr */
5418                     {
5419 #ifdef TARGET_SPARC64
5420                         gen_address_mask(dc, cpu_addr);
5421                         if (rd == 1) {
5422                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5423                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5424                             break;
5425                         }
5426 #endif
5427                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5428                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5429                     }
5430                     break;
5431                 case 0x26:
5432 #ifdef TARGET_SPARC64
5433                     /* V9 stqf, store quad fpreg */
5434                     CHECK_FPU_FEATURE(dc, FLOAT128);
5435                     gen_address_mask(dc, cpu_addr);
5436                     /* ??? While stqf only requires 4-byte alignment, it is
5437                        legal for the cpu to signal the unaligned exception.
5438                        The OS trap handler is then required to fix it up.
5439                        For qemu, this avoids having to probe the second page
5440                        before performing the first write.  */
5441                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5442                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5443                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5444                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5445                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5446                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5447                                         dc->mem_idx, MO_TEUQ);
5448                     break;
5449 #else /* !TARGET_SPARC64 */
5450                     /* stdfq, store floating point queue */
5451 #if defined(CONFIG_USER_ONLY)
5452                     goto illegal_insn;
5453 #else
5454                     if (!supervisor(dc))
5455                         goto priv_insn;
5456                     if (gen_trap_ifnofpu(dc)) {
5457                         goto jmp_insn;
5458                     }
5459                     goto nfq_insn;
5460 #endif
5461 #endif
5462                 case 0x27: /* stdf, store double fpreg */
5463                     gen_address_mask(dc, cpu_addr);
5464                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5465                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5466                                         MO_TEUQ | MO_ALIGN_4);
5467                     break;
5468                 default:
5469                     goto illegal_insn;
5470                 }
5471             } else if (xop > 0x33 && xop < 0x3f) {
5472                 switch (xop) {
5473 #ifdef TARGET_SPARC64
5474                 case 0x34: /* V9 stfa */
5475                     if (gen_trap_ifnofpu(dc)) {
5476                         goto jmp_insn;
5477                     }
5478                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5479                     break;
5480                 case 0x36: /* V9 stqfa */
5481                     {
5482                         CHECK_FPU_FEATURE(dc, FLOAT128);
5483                         if (gen_trap_ifnofpu(dc)) {
5484                             goto jmp_insn;
5485                         }
5486                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5487                     }
5488                     break;
5489                 case 0x37: /* V9 stdfa */
5490                     if (gen_trap_ifnofpu(dc)) {
5491                         goto jmp_insn;
5492                     }
5493                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5494                     break;
5495                 case 0x3e: /* V9 casxa */
5496                     rs2 = GET_FIELD(insn, 27, 31);
5497                     cpu_src2 = gen_load_gpr(dc, rs2);
5498                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5499                     break;
5500 #else
5501                 case 0x34: /* stc */
5502                 case 0x35: /* stcsr */
5503                 case 0x36: /* stdcq */
5504                 case 0x37: /* stdc */
5505                     goto ncp_insn;
5506 #endif
5507 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5508                 case 0x3c: /* V9 or LEON3 casa */
5509 #ifndef TARGET_SPARC64
5510                     CHECK_IU_FEATURE(dc, CASA);
5511 #endif
5512                     rs2 = GET_FIELD(insn, 27, 31);
5513                     cpu_src2 = gen_load_gpr(dc, rs2);
5514                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5515                     break;
5516 #endif
5517                 default:
5518                     goto illegal_insn;
5519                 }
5520             } else {
5521                 goto illegal_insn;
5522             }
5523         }
5524         break;
5525     }
5526     advance_pc(dc);
5527  jmp_insn:
5528     return;
5529  illegal_insn:
5530     gen_exception(dc, TT_ILL_INSN);
5531     return;
5532 #if !defined(CONFIG_USER_ONLY)
5533  priv_insn:
5534     gen_exception(dc, TT_PRIV_INSN);
5535     return;
5536 #endif
5537  nfpu_insn:
5538     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5539     return;
5540 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5541  nfq_insn:
5542     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5543     return;
5544 #endif
5545 #ifndef TARGET_SPARC64
5546  ncp_insn:
5547     gen_exception(dc, TT_NCP_INSN);
5548     return;
5549 #endif
5550 }
5551 
5552 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5553 {
5554     DisasContext *dc = container_of(dcbase, DisasContext, base);
5555     CPUSPARCState *env = cpu_env(cs);
5556     int bound;
5557 
5558     dc->pc = dc->base.pc_first;
5559     dc->npc = (target_ulong)dc->base.tb->cs_base;
5560     dc->cc_op = CC_OP_DYNAMIC;
5561     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5562     dc->def = &env->def;
5563     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5564     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5565 #ifndef CONFIG_USER_ONLY
5566     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5567 #endif
5568 #ifdef TARGET_SPARC64
5569     dc->fprs_dirty = 0;
5570     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5571 #ifndef CONFIG_USER_ONLY
5572     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5573 #endif
5574 #endif
5575     /*
5576      * if we reach a page boundary, we stop generation so that the
5577      * PC of a TT_TFAULT exception is always in the right page
5578      */
5579     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5580     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5581 }
5582 
/* Per-TB start hook: SPARC needs no extra setup before the first insn. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5586 
5587 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5588 {
5589     DisasContext *dc = container_of(dcbase, DisasContext, base);
5590     target_ulong npc = dc->npc;
5591 
5592     if (npc & 3) {
5593         switch (npc) {
5594         case JUMP_PC:
5595             assert(dc->jump_pc[1] == dc->pc + 4);
5596             npc = dc->jump_pc[0] | JUMP_PC;
5597             break;
5598         case DYNAMIC_PC:
5599         case DYNAMIC_PC_LOOKUP:
5600             npc = DYNAMIC_PC;
5601             break;
5602         default:
5603             g_assert_not_reached();
5604         }
5605     }
5606     tcg_gen_insn_start(dc->pc, npc);
5607 }
5608 
5609 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5610 {
5611     DisasContext *dc = container_of(dcbase, DisasContext, base);
5612     CPUSPARCState *env = cpu_env(cs);
5613     unsigned int insn;
5614 
5615     insn = translator_ldl(env, &dc->base, dc->pc);
5616     dc->base.pc_next += 4;
5617 
5618     if (!decode(dc, insn)) {
5619         disas_sparc_legacy(dc, insn);
5620     }
5621 
5622     if (dc->base.is_jmp == DISAS_NORETURN) {
5623         return;
5624     }
5625     if (dc->pc != dc->base.pc_next) {
5626         dc->base.is_jmp = DISAS_TOO_MANY;
5627     }
5628 }
5629 
/*
 * Emit the end-of-TB code.  dc->pc/dc->npc hold either a real
 * (4-aligned) address or one of the symbolic markers (DYNAMIC_PC,
 * JUMP_PC, DYNAMIC_PC_LOOKUP) in the low two bits.  Afterwards, emit
 * the out-of-line code for any exceptions queued for delay slots.
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of PC/NPC is dynamic.  Materialize both into
         * cpu_pc/cpu_npc, tracking whether a TB lookup is permitted
         * (DYNAMIC_PC forces a full exit to the main loop).
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Static PC: store it; cpu_npc alone is dynamic. */
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* Two-way NPC: select between jump_pc[0] and jump_pc[1]. */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /*
     * Emit the code for each delayed exception: bind its label, set the
     * exception PC/NPC (NPC only when it is a real, aligned address),
     * and raise.  The list nodes were heap-allocated; free as we go.
     */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5709 
/* Log the guest code of this TB: symbol header plus target disassembly. */
static void sparc_tr_disas_log(const DisasContextBase *dcbase,
                               CPUState *cpu, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
}
5716 
/* Hooks handed to the generic translator loop. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5725 
/*
 * Translate one SPARC TB: set up a zeroed DisasContext and drive the
 * generic translator loop with the SPARC hooks.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext dc = {};

    translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
}
5733 
/*
 * One-time TCG setup: register every CPUSPARCState field the translator
 * accesses as a TCG global, so generated code can name them directly.
 */
void sparc_tcg_init(void)
{
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* Names are for double-precision pairs, hence only even numbers. */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals and their offsets inside CPUSPARCState. */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-sized globals, likewise. */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 is hardwired to zero; it never gets a backing global. */
    cpu_regs[0] = NULL;
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(tcg_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* Windowed registers are addressed indirectly through regwptr. */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
5819 
5820 void sparc_restore_state_to_opc(CPUState *cs,
5821                                 const TranslationBlock *tb,
5822                                 const uint64_t *data)
5823 {
5824     SPARCCPU *cpu = SPARC_CPU(cs);
5825     CPUSPARCState *env = &cpu->env;
5826     target_ulong pc = data[0];
5827     target_ulong npc = data[1];
5828 
5829     env->pc = pc;
5830     if (npc == DYNAMIC_PC) {
5831         /* dynamic NPC: already stored */
5832     } else if (npc & JUMP_PC) {
5833         /* jump PC: use 'cond' and the jump targets of the translation */
5834         if (env->cond) {
5835             env->npc = npc & ~3;
5836         } else {
5837             env->npc = pc + 4;
5838         }
5839     } else {
5840         env->npc = npc;
5841     }
5842 }
5843