xref: /openbmc/qemu/target/sparc/translate.c (revision 45196ea4f47b84ed0a9890f309de80665a3e7a81)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
48 /* global register indexes */
49 static TCGv_ptr cpu_regwptr;
50 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
51 static TCGv_i32 cpu_cc_op;
52 static TCGv_i32 cpu_psr;
53 static TCGv cpu_fsr, cpu_pc, cpu_npc;
54 static TCGv cpu_regs[32];
55 static TCGv cpu_y;
56 #ifndef CONFIG_USER_ONLY
57 static TCGv cpu_tbr;
58 #endif
59 static TCGv cpu_cond;
60 #ifdef TARGET_SPARC64
61 static TCGv_i32 cpu_xcc, cpu_fprs;
62 static TCGv cpu_gsr;
63 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
64 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
65 #else
66 static TCGv cpu_wim;
67 #endif
68 /* Floating point registers */
69 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
/*
 * Record of an exception to be raised out of line (e.g. from an
 * alignment check); queued on DisasContext.delay_excp_list.
 */
typedef struct DisasDelayException {
    struct DisasDelayException *next;   /* next entry in the list */
    TCGLabel *lab;                      /* label branched to on failure */
    TCGv_i32 excp;                      /* exception number to raise */
    /* Saved state at parent insn. */
    target_ulong pc;
    target_ulong npc;
} DisasDelayException;
79 
/* Per-translation-block state of the SPARC front end. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;             /* index selecting the memory access mode */
    bool fpu_enabled;        /* FPU usable; checked before fp insns */
    bool address_mask_32bit; /* mask effective addresses to 32 bits
                                (see AM_CHECK / gen_address_mask) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;         /* ASI of the current insn — TODO confirm encoding */
#endif
    DisasDelayException *delay_excp_list; /* pending out-of-line exceptions */
} DisasContext;
103 
/*
 * A comparison prepared for brcond/movcond: "c1 <cond> c2".
 * is_bool appears to mean c1 already holds a computed 0/1 value
 * (see its uses in gen_compare) — verify before relying on it.
 */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
109 
110 // This function uses non-native bit order
111 #define GET_FIELD(X, FROM, TO)                                  \
112     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
113 
114 // This function uses the order in the manuals, i.e. bit 0 is 2^0
115 #define GET_FIELD_SP(X, FROM, TO)               \
116     GET_FIELD(X, 31 - (TO), 31 - (FROM))
117 
118 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
119 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
120 
121 #ifdef TARGET_SPARC64
122 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
123 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
124 #else
125 #define DFPREG(r) (r & 0x1e)
126 #define QFPREG(r) (r & 0x1c)
127 #endif
128 
129 #define UA2005_HTRAP_MASK 0xff
130 #define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low LEN bits of X to a full signed int.
 * E.g. sign_extend(0x1fff, 13) == -1.
 *
 * The left shift is done on an unsigned value: left-shifting a
 * negative signed int is undefined behaviour (C11 6.5.7).
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((uint32_t)x << len) >> len;
}
137 
138 #define IS_IMM (insn & (1<<13))
139 
140 static void gen_update_fprs_dirty(DisasContext *dc, int rd)
141 {
142 #if defined(TARGET_SPARC64)
143     int bit = (rd < 32) ? 1 : 2;
144     /* If we know we've already set this bit within the TB,
145        we can avoid setting it again.  */
146     if (!(dc->fprs_dirty & bit)) {
147         dc->fprs_dirty |= bit;
148         tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
149     }
150 #endif
151 }
152 
153 /* floating point registers moves */
154 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
155 {
156     TCGv_i32 ret = tcg_temp_new_i32();
157     if (src & 1) {
158         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
159     } else {
160         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
161     }
162     return ret;
163 }
164 
165 static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
166 {
167     TCGv_i64 t = tcg_temp_new_i64();
168 
169     tcg_gen_extu_i32_i64(t, v);
170     tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
171                         (dst & 1 ? 0 : 32), 32);
172     gen_update_fprs_dirty(dc, dst);
173 }
174 
/* Destination temp for a single-precision result; written back later
   via gen_store_fpr_F. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
179 
/* Double-precision register %dSRC; returns the backing global directly. */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
185 
186 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
187 {
188     dst = DFPREG(dst);
189     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
190     gen_update_fprs_dirty(dc, dst);
191 }
192 
/* Destination for a double-precision result: the backing global itself. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
198 static void gen_op_load_fpr_QT0(unsigned int src)
199 {
200     tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
201                    offsetof(CPU_QuadU, ll.upper));
202     tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
203                    offsetof(CPU_QuadU, ll.lower));
204 }
205 
206 static void gen_op_load_fpr_QT1(unsigned int src)
207 {
208     tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
209                    offsetof(CPU_QuadU, ll.upper));
210     tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
211                    offsetof(CPU_QuadU, ll.lower));
212 }
213 
214 static void gen_op_store_QT0_fpr(unsigned int dst)
215 {
216     tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
217                    offsetof(CPU_QuadU, ll.upper));
218     tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
219                    offsetof(CPU_QuadU, ll.lower));
220 }
221 
222 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
223                             TCGv_i64 v1, TCGv_i64 v2)
224 {
225     dst = QFPREG(dst);
226 
227     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
228     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
229     gen_update_fprs_dirty(dc, dst);
230 }
231 
232 #ifdef TARGET_SPARC64
/* First 64-bit half of quad register %qSRC (cf. ll.upper in QT0 load). */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}
238 
/* Second 64-bit half of quad register %qSRC (cf. ll.lower in QT0 load). */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}
244 
245 static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
246 {
247     rd = QFPREG(rd);
248     rs = QFPREG(rs);
249 
250     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
251     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
252     gen_update_fprs_dirty(dc, rd);
253 }
254 #endif
255 
256 /* moves */
257 #ifdef CONFIG_USER_ONLY
258 #define supervisor(dc) 0
259 #ifdef TARGET_SPARC64
260 #define hypervisor(dc) 0
261 #endif
262 #else
263 #ifdef TARGET_SPARC64
264 #define hypervisor(dc) (dc->hypervisor)
265 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
266 #else
267 #define supervisor(dc) (dc->supervisor)
268 #endif
269 #endif
270 
271 #if !defined(TARGET_SPARC64)
272 # define AM_CHECK(dc)  false
273 #elif defined(TARGET_ABI32)
274 # define AM_CHECK(dc)  true
275 #elif defined(CONFIG_USER_ONLY)
276 # define AM_CHECK(dc)  false
277 #else
278 # define AM_CHECK(dc)  ((dc)->address_mask_32bit)
279 #endif
280 
281 static void gen_address_mask(DisasContext *dc, TCGv addr)
282 {
283     if (AM_CHECK(dc)) {
284         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
285     }
286 }
287 
288 static target_ulong address_mask_i(DisasContext *dc, target_ulong addr)
289 {
290     return AM_CHECK(dc) ? (uint32_t)addr : addr;
291 }
292 
293 static TCGv gen_load_gpr(DisasContext *dc, int reg)
294 {
295     if (reg > 0) {
296         assert(reg < 32);
297         return cpu_regs[reg];
298     } else {
299         TCGv t = tcg_temp_new();
300         tcg_gen_movi_tl(t, 0);
301         return t;
302     }
303 }
304 
305 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
306 {
307     if (reg > 0) {
308         assert(reg < 32);
309         tcg_gen_mov_tl(cpu_regs[reg], v);
310     }
311 }
312 
313 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
314 {
315     if (reg > 0) {
316         assert(reg < 32);
317         return cpu_regs[reg];
318     } else {
319         return tcg_temp_new();
320     }
321 }
322 
323 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
324 {
325     return translator_use_goto_tb(&s->base, pc) &&
326            translator_use_goto_tb(&s->base, npc);
327 }
328 
329 static void gen_goto_tb(DisasContext *s, int tb_num,
330                         target_ulong pc, target_ulong npc)
331 {
332     if (use_goto_tb(s, pc, npc))  {
333         /* jump to same page: we can use a direct jump */
334         tcg_gen_goto_tb(tb_num);
335         tcg_gen_movi_tl(cpu_pc, pc);
336         tcg_gen_movi_tl(cpu_npc, npc);
337         tcg_gen_exit_tb(s->base.tb, tb_num);
338     } else {
339         /* jump to another page: we can use an indirect jump */
340         tcg_gen_movi_tl(cpu_pc, pc);
341         tcg_gen_movi_tl(cpu_npc, npc);
342         tcg_gen_lookup_and_goto_ptr();
343     }
344 }
345 
// XXX suboptimal
/* reg = the PSR negative flag (N) as a 0/1 target-long value. */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
352 
/* reg = the PSR zero flag (Z) as a 0/1 target-long value. */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
358 
/* reg = the PSR overflow flag (V) as a 0/1 target-long value. */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
364 
/* reg = the PSR carry flag (C) as a 0/1 target-long value. */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
370 
/*
 * dst = src1 + src2, leaving the operands and result in
 * cpu_cc_src/cpu_cc_src2/cpu_cc_dst for lazy flag evaluation.
 */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
378 
/*
 * Return the 32-bit carry out of the previous ADD recorded in the
 * cc globals: carry = (uint32_t)cc_dst < (uint32_t)cc_src.
 */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit globals. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
399 
/*
 * Return the 32-bit borrow out of the previous SUB recorded in the
 * cc globals: borrow = (uint32_t)cc_src < (uint32_t)cc_src2.
 */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Compare only the low 32 bits of the 64-bit globals. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
420 
/*
 * ADDX/ADDXcc: dst = src1 + src2 + C, where the carry is recovered
 * from whatever operation dc->cc_op says last set the flags.  When
 * update_cc is set, also record operands/result and switch the lazy
 * flag state to CC_OP_ADDX.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit carry to a 64-bit target value. */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
485 
/*
 * dst = src1 - src2, leaving the operands and result in
 * cpu_cc_src/cpu_cc_src2/cpu_cc_dst for lazy flag evaluation.
 */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
493 
/*
 * SUBX/SUBXcc: dst = src1 - src2 - C, where the borrow is recovered
 * from whatever operation dc->cc_op says last set the flags.  When
 * update_cc is set, also record operands/result and switch the lazy
 * flag state to CC_OP_SUBX.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the 32-bit borrow to a 64-bit target value. */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
558 
/*
 * MULScc: one step of the SPARC v8 multiply-step algorithm.
 * %y shifts right by one with src1's bit 0 inserted at the top;
 * src2 contributes only if the old %y bit 0 was set; src1 shifts
 * right with (N ^ V) inserted at bit 31; the two are added, leaving
 * operands/result in the cc globals for flag evaluation.
 */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
597 
/*
 * 32x32 -> 64-bit multiply for UMUL/SMUL.  The high 32 bits of the
 * product are written to %y; dst receives the product (the full
 * 64 bits when target_long is 64-bit).  sign_ext selects signed vs
 * unsigned extension of the truncated 32-bit operands.
 */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
622 
/* UMUL: unsigned 32x32 multiply; see gen_op_multiply. */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
628 
/* SMUL: signed 32x32 multiply; see gen_op_multiply. */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
634 
// 1
/* Branch-always: condition evaluates to constant true. */
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
640 
// Z
/* Branch-equal: the zero flag. */
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
646 
647 // Z | (N ^ V)
648 static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
649 {
650     TCGv t0 = tcg_temp_new();
651     gen_mov_reg_N(t0, src);
652     gen_mov_reg_V(dst, src);
653     tcg_gen_xor_tl(dst, dst, t0);
654     gen_mov_reg_Z(t0, src);
655     tcg_gen_or_tl(dst, dst, t0);
656 }
657 
658 // N ^ V
659 static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
660 {
661     TCGv t0 = tcg_temp_new();
662     gen_mov_reg_V(t0, src);
663     gen_mov_reg_N(dst, src);
664     tcg_gen_xor_tl(dst, dst, t0);
665 }
666 
667 // C | Z
668 static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
669 {
670     TCGv t0 = tcg_temp_new();
671     gen_mov_reg_Z(t0, src);
672     gen_mov_reg_C(dst, src);
673     tcg_gen_or_tl(dst, dst, t0);
674 }
675 
// C
/* Branch-on-carry-set: the carry flag. */
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
681 
// V
/* Branch-on-overflow-set: the overflow flag. */
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
687 
// 0
/* Branch-never: condition evaluates to constant false. */
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
693 
// N
/* Branch-on-negative: the negative flag. */
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
699 
// !Z
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);  /* invert the 0/1 flag value */
}
706 
// !(Z | (N ^ V))
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    /* Compute "less or equal", then invert. */
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
713 
// !(N ^ V)
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    /* Compute "less", then invert. */
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
720 
// !(C | Z)
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    /* Compute "less or equal unsigned", then invert. */
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
727 
// !C
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);  /* invert the 0/1 flag value */
}
734 
// !N
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);  /* invert the 0/1 flag value */
}
741 
// !V
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);  /* invert the 0/1 flag value */
}
748 
749 /*
750   FPSR bit field FCC1 | FCC0:
751    0 =
752    1 <
753    2 >
754    3 unordered
755 */
756 static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
757                                     unsigned int fcc_offset)
758 {
759     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
760     tcg_gen_andi_tl(reg, reg, 0x1);
761 }
762 
763 static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
764 {
765     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
766     tcg_gen_andi_tl(reg, reg, 0x1);
767 }
768 
769 // !0: FCC0 | FCC1
770 static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
771 {
772     TCGv t0 = tcg_temp_new();
773     gen_mov_reg_FCC0(dst, src, fcc_offset);
774     gen_mov_reg_FCC1(t0, src, fcc_offset);
775     tcg_gen_or_tl(dst, dst, t0);
776 }
777 
778 // 1 or 2: FCC0 ^ FCC1
779 static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
780 {
781     TCGv t0 = tcg_temp_new();
782     gen_mov_reg_FCC0(dst, src, fcc_offset);
783     gen_mov_reg_FCC1(t0, src, fcc_offset);
784     tcg_gen_xor_tl(dst, dst, t0);
785 }
786 
// 1 or 3: FCC0
/* Unordered-or-less: fcc bit 0 alone. */
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
792 
793 // 1: FCC0 & !FCC1
794 static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
795 {
796     TCGv t0 = tcg_temp_new();
797     gen_mov_reg_FCC0(dst, src, fcc_offset);
798     gen_mov_reg_FCC1(t0, src, fcc_offset);
799     tcg_gen_andc_tl(dst, dst, t0);
800 }
801 
// 2 or 3: FCC1
/* Unordered-or-greater: fcc bit 1 alone. */
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
807 
808 // 2: !FCC0 & FCC1
809 static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
810 {
811     TCGv t0 = tcg_temp_new();
812     gen_mov_reg_FCC0(dst, src, fcc_offset);
813     gen_mov_reg_FCC1(t0, src, fcc_offset);
814     tcg_gen_andc_tl(dst, t0, dst);
815 }
816 
817 // 3: FCC0 & FCC1
818 static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
819 {
820     TCGv t0 = tcg_temp_new();
821     gen_mov_reg_FCC0(dst, src, fcc_offset);
822     gen_mov_reg_FCC1(t0, src, fcc_offset);
823     tcg_gen_and_tl(dst, dst, t0);
824 }
825 
826 // 0: !(FCC0 | FCC1)
827 static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
828 {
829     TCGv t0 = tcg_temp_new();
830     gen_mov_reg_FCC0(dst, src, fcc_offset);
831     gen_mov_reg_FCC1(t0, src, fcc_offset);
832     tcg_gen_or_tl(dst, dst, t0);
833     tcg_gen_xori_tl(dst, dst, 0x1);
834 }
835 
836 // 0 or 3: !(FCC0 ^ FCC1)
837 static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
838 {
839     TCGv t0 = tcg_temp_new();
840     gen_mov_reg_FCC0(dst, src, fcc_offset);
841     gen_mov_reg_FCC1(t0, src, fcc_offset);
842     tcg_gen_xor_tl(dst, dst, t0);
843     tcg_gen_xori_tl(dst, dst, 0x1);
844 }
845 
// 0 or 2: !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);  /* invert the 0/1 bit */
}
852 
853 // !1: !(FCC0 & !FCC1)
854 static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
855 {
856     TCGv t0 = tcg_temp_new();
857     gen_mov_reg_FCC0(dst, src, fcc_offset);
858     gen_mov_reg_FCC1(t0, src, fcc_offset);
859     tcg_gen_andc_tl(dst, dst, t0);
860     tcg_gen_xori_tl(dst, dst, 0x1);
861 }
862 
// 0 or 1: !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);  /* invert the 0/1 bit */
}
869 
870 // !2: !(!FCC0 & FCC1)
871 static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
872 {
873     TCGv t0 = tcg_temp_new();
874     gen_mov_reg_FCC0(dst, src, fcc_offset);
875     gen_mov_reg_FCC1(t0, src, fcc_offset);
876     tcg_gen_andc_tl(dst, t0, dst);
877     tcg_gen_xori_tl(dst, dst, 0x1);
878 }
879 
880 // !3: !(FCC0 & FCC1)
881 static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
882 {
883     TCGv t0 = tcg_temp_new();
884     gen_mov_reg_FCC0(dst, src, fcc_offset);
885     gen_mov_reg_FCC1(t0, src, fcc_offset);
886     tcg_gen_and_tl(dst, dst, t0);
887     tcg_gen_xori_tl(dst, dst, 0x1);
888 }
889 
/*
 * End the TB with two static successors: continue at PC1 when
 * R_COND is nonzero, else at PC2 (each with its sequential npc).
 */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
902 
/*
 * Annulling conditional branch: if cpu_cond is nonzero, execute the
 * delay slot at npc then jump to PC1; otherwise skip the delay slot
 * and continue at npc + 4.  Ends the TB.
 */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
917 
/*
 * Conditional branch to PC1 with the fall-through successor npc + 4;
 * the choice is deferred via JUMP_PC when npc is statically known,
 * or resolved at runtime with a movcond otherwise.
 */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* npc only known at runtime: advance, then override npc
               with the branch target when cpu_cond != 0. */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Static npc: record both candidates and mark npc symbolic. */
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}
943 
/* Resolve a JUMP_PC npc: cpu_npc = cpu_cond ? jump_pc[0] : jump_pc[1]. */
static void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
952 
953 /* call this function before using the condition register as it may
954    have been set for a jump */
955 static void flush_cond(DisasContext *dc)
956 {
957     if (dc->npc == JUMP_PC) {
958         gen_generic_branch(dc);
959         dc->npc = DYNAMIC_PC_LOOKUP;
960     }
961 }
962 
/* Flush the translator's (possibly symbolic) npc into cpu_npc. */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            /* Resolve the two-way choice into cpu_npc. */
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* cpu_npc is already up to date. */
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
981 
982 static void update_psr(DisasContext *dc)
983 {
984     if (dc->cc_op != CC_OP_FLAGS) {
985         dc->cc_op = CC_OP_FLAGS;
986         gen_helper_compute_psr(tcg_env);
987     }
988 }
989 
/* Write the translator's pc and npc back to the CPU state. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
995 
996 static void gen_exception(DisasContext *dc, int which)
997 {
998     save_state(dc);
999     gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
1000     dc->base.is_jmp = DISAS_NORETURN;
1001 }
1002 
1003 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
1004 {
1005     DisasDelayException *e = g_new0(DisasDelayException, 1);
1006 
1007     e->next = dc->delay_excp_list;
1008     dc->delay_excp_list = e;
1009 
1010     e->lab = gen_new_label();
1011     e->excp = excp;
1012     e->pc = dc->pc;
1013     /* Caller must have used flush_cond before branch. */
1014     assert(e->npc != JUMP_PC);
1015     e->npc = dc->npc;
1016 
1017     return e->lab;
1018 }
1019 
/* As delay_exceptionv, for a compile-time constant exception number. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
1024 
/*
 * Emit a runtime alignment check: if ADDR has any bits of MASK set,
 * branch to a deferred TT_UNALIGNED exception carrying the current
 * pc/npc state.
 */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    /* Resolve any pending JUMP_PC npc before recording state. */
    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}
1036 
/* Advance pc to npc (delay-slot step), handling symbolic npc values. */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            /* Resolve the two-way npc, then copy it into pc. */
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
    }
}
1058 
/* Sequential advance: pc <- npc, npc <- npc + 4. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1064 
1065 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1066                         DisasContext *dc)
1067 {
1068     static int subcc_cond[16] = {
1069         TCG_COND_NEVER,
1070         TCG_COND_EQ,
1071         TCG_COND_LE,
1072         TCG_COND_LT,
1073         TCG_COND_LEU,
1074         TCG_COND_LTU,
1075         -1, /* neg */
1076         -1, /* overflow */
1077         TCG_COND_ALWAYS,
1078         TCG_COND_NE,
1079         TCG_COND_GT,
1080         TCG_COND_GE,
1081         TCG_COND_GTU,
1082         TCG_COND_GEU,
1083         -1, /* pos */
1084         -1, /* no overflow */
1085     };
1086 
1087     static int logic_cond[16] = {
1088         TCG_COND_NEVER,
1089         TCG_COND_EQ,     /* eq:  Z */
1090         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1091         TCG_COND_LT,     /* lt:  N ^ V -> N */
1092         TCG_COND_EQ,     /* leu: C | Z -> Z */
1093         TCG_COND_NEVER,  /* ltu: C -> 0 */
1094         TCG_COND_LT,     /* neg: N */
1095         TCG_COND_NEVER,  /* vs:  V -> 0 */
1096         TCG_COND_ALWAYS,
1097         TCG_COND_NE,     /* ne:  !Z */
1098         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1099         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1100         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1101         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1102         TCG_COND_GE,     /* pos: !N */
1103         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1104     };
1105 
1106     TCGv_i32 r_src;
1107     TCGv r_dst;
1108 
1109 #ifdef TARGET_SPARC64
1110     if (xcc) {
1111         r_src = cpu_xcc;
1112     } else {
1113         r_src = cpu_psr;
1114     }
1115 #else
1116     r_src = cpu_psr;
1117 #endif
1118 
1119     switch (dc->cc_op) {
1120     case CC_OP_LOGIC:
1121         cmp->cond = logic_cond[cond];
1122     do_compare_dst_0:
1123         cmp->is_bool = false;
1124         cmp->c2 = tcg_constant_tl(0);
1125 #ifdef TARGET_SPARC64
1126         if (!xcc) {
1127             cmp->c1 = tcg_temp_new();
1128             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1129             break;
1130         }
1131 #endif
1132         cmp->c1 = cpu_cc_dst;
1133         break;
1134 
1135     case CC_OP_SUB:
1136         switch (cond) {
1137         case 6:  /* neg */
1138         case 14: /* pos */
1139             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1140             goto do_compare_dst_0;
1141 
1142         case 7: /* overflow */
1143         case 15: /* !overflow */
1144             goto do_dynamic;
1145 
1146         default:
1147             cmp->cond = subcc_cond[cond];
1148             cmp->is_bool = false;
1149 #ifdef TARGET_SPARC64
1150             if (!xcc) {
1151                 /* Note that sign-extension works for unsigned compares as
1152                    long as both operands are sign-extended.  */
1153                 cmp->c1 = tcg_temp_new();
1154                 cmp->c2 = tcg_temp_new();
1155                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1156                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1157                 break;
1158             }
1159 #endif
1160             cmp->c1 = cpu_cc_src;
1161             cmp->c2 = cpu_cc_src2;
1162             break;
1163         }
1164         break;
1165 
1166     default:
1167     do_dynamic:
1168         gen_helper_compute_psr(tcg_env);
1169         dc->cc_op = CC_OP_FLAGS;
1170         /* FALLTHRU */
1171 
1172     case CC_OP_FLAGS:
1173         /* We're going to generate a boolean result.  */
1174         cmp->cond = TCG_COND_NE;
1175         cmp->is_bool = true;
1176         cmp->c1 = r_dst = tcg_temp_new();
1177         cmp->c2 = tcg_constant_tl(0);
1178 
1179         switch (cond) {
1180         case 0x0:
1181             gen_op_eval_bn(r_dst);
1182             break;
1183         case 0x1:
1184             gen_op_eval_be(r_dst, r_src);
1185             break;
1186         case 0x2:
1187             gen_op_eval_ble(r_dst, r_src);
1188             break;
1189         case 0x3:
1190             gen_op_eval_bl(r_dst, r_src);
1191             break;
1192         case 0x4:
1193             gen_op_eval_bleu(r_dst, r_src);
1194             break;
1195         case 0x5:
1196             gen_op_eval_bcs(r_dst, r_src);
1197             break;
1198         case 0x6:
1199             gen_op_eval_bneg(r_dst, r_src);
1200             break;
1201         case 0x7:
1202             gen_op_eval_bvs(r_dst, r_src);
1203             break;
1204         case 0x8:
1205             gen_op_eval_ba(r_dst);
1206             break;
1207         case 0x9:
1208             gen_op_eval_bne(r_dst, r_src);
1209             break;
1210         case 0xa:
1211             gen_op_eval_bg(r_dst, r_src);
1212             break;
1213         case 0xb:
1214             gen_op_eval_bge(r_dst, r_src);
1215             break;
1216         case 0xc:
1217             gen_op_eval_bgu(r_dst, r_src);
1218             break;
1219         case 0xd:
1220             gen_op_eval_bcc(r_dst, r_src);
1221             break;
1222         case 0xe:
1223             gen_op_eval_bpos(r_dst, r_src);
1224             break;
1225         case 0xf:
1226             gen_op_eval_bvc(r_dst, r_src);
1227             break;
1228         }
1229         break;
1230     }
1231 }
1232 
1233 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1234 {
1235     unsigned int offset;
1236     TCGv r_dst;
1237 
1238     /* For now we still generate a straight boolean result.  */
1239     cmp->cond = TCG_COND_NE;
1240     cmp->is_bool = true;
1241     cmp->c1 = r_dst = tcg_temp_new();
1242     cmp->c2 = tcg_constant_tl(0);
1243 
1244     switch (cc) {
1245     default:
1246     case 0x0:
1247         offset = 0;
1248         break;
1249     case 0x1:
1250         offset = 32 - 10;
1251         break;
1252     case 0x2:
1253         offset = 34 - 10;
1254         break;
1255     case 0x3:
1256         offset = 36 - 10;
1257         break;
1258     }
1259 
1260     switch (cond) {
1261     case 0x0:
1262         gen_op_eval_bn(r_dst);
1263         break;
1264     case 0x1:
1265         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1266         break;
1267     case 0x2:
1268         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1269         break;
1270     case 0x3:
1271         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1272         break;
1273     case 0x4:
1274         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1275         break;
1276     case 0x5:
1277         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1278         break;
1279     case 0x6:
1280         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1281         break;
1282     case 0x7:
1283         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1284         break;
1285     case 0x8:
1286         gen_op_eval_ba(r_dst);
1287         break;
1288     case 0x9:
1289         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1290         break;
1291     case 0xa:
1292         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1293         break;
1294     case 0xb:
1295         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1296         break;
1297     case 0xc:
1298         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1299         break;
1300     case 0xd:
1301         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1302         break;
1303     case 0xe:
1304         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1305         break;
1306     case 0xf:
1307         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1308         break;
1309     }
1310 }
1311 
1312 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1313                      DisasContext *dc)
1314 {
1315     DisasCompare cmp;
1316     gen_compare(&cmp, cc, cond, dc);
1317 
1318     /* The interface is to return a boolean in r_dst.  */
1319     if (cmp.is_bool) {
1320         tcg_gen_mov_tl(r_dst, cmp.c1);
1321     } else {
1322         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1323     }
1324 }
1325 
1326 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1327 {
1328     DisasCompare cmp;
1329     gen_fcompare(&cmp, cc, cond);
1330 
1331     /* The interface is to return a boolean in r_dst.  */
1332     if (cmp.is_bool) {
1333         tcg_gen_mov_tl(r_dst, cmp.c1);
1334     } else {
1335         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1336     }
1337 }
1338 
/*
 * Conditions for register-vs-zero tests, stored INVERTED:
 * gen_compare_reg() applies tcg_invert_cond() before use.
 */
static const TCGCond gen_tcg_cond_reg[8] = {
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_NE,     /* after inversion: EQ, reg == 0 */
    TCG_COND_GT,     /* after inversion: LE, reg <= 0 */
    TCG_COND_GE,     /* after inversion: LT, reg <  0 */
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_EQ,     /* after inversion: NE, reg != 0 */
    TCG_COND_LE,     /* after inversion: GT, reg >  0 */
    TCG_COND_LT,     /* after inversion: GE, reg >= 0 */
};
1350 
1351 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1352 {
1353     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1354     cmp->is_bool = false;
1355     cmp->c1 = r_src;
1356     cmp->c2 = tcg_constant_tl(0);
1357 }
1358 
1359 #ifdef TARGET_SPARC64
1360 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1361 {
1362     switch (fccno) {
1363     case 0:
1364         gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
1365         break;
1366     case 1:
1367         gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1368         break;
1369     case 2:
1370         gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1371         break;
1372     case 3:
1373         gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1374         break;
1375     }
1376 }
1377 
1378 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1379 {
1380     switch (fccno) {
1381     case 0:
1382         gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
1383         break;
1384     case 1:
1385         gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1386         break;
1387     case 2:
1388         gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1389         break;
1390     case 3:
1391         gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1392         break;
1393     }
1394 }
1395 
1396 static void gen_op_fcmpq(int fccno)
1397 {
1398     switch (fccno) {
1399     case 0:
1400         gen_helper_fcmpq(cpu_fsr, tcg_env);
1401         break;
1402     case 1:
1403         gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
1404         break;
1405     case 2:
1406         gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
1407         break;
1408     case 3:
1409         gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
1410         break;
1411     }
1412 }
1413 
1414 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1415 {
1416     switch (fccno) {
1417     case 0:
1418         gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
1419         break;
1420     case 1:
1421         gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1422         break;
1423     case 2:
1424         gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1425         break;
1426     case 3:
1427         gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1428         break;
1429     }
1430 }
1431 
1432 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1433 {
1434     switch (fccno) {
1435     case 0:
1436         gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
1437         break;
1438     case 1:
1439         gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1440         break;
1441     case 2:
1442         gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1443         break;
1444     case 3:
1445         gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1446         break;
1447     }
1448 }
1449 
1450 static void gen_op_fcmpeq(int fccno)
1451 {
1452     switch (fccno) {
1453     case 0:
1454         gen_helper_fcmpeq(cpu_fsr, tcg_env);
1455         break;
1456     case 1:
1457         gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
1458         break;
1459     case 2:
1460         gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
1461         break;
1462     case 3:
1463         gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
1464         break;
1465     }
1466 }
1467 
1468 #else
1469 
/* Pre-v9 has only one fcc field; fccno is ignored. */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1474 
/* Pre-v9 has only one fcc field; fccno is ignored. */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1479 
/* Pre-v9 quad compare; fccno ignored, operands implicit in env (QT0/QT1). */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}
1484 
/* Pre-v9 signaling (FCMPEs) compare; fccno is ignored. */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1489 
/* Pre-v9 signaling (FCMPEd) compare; fccno is ignored. */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1494 
/* Pre-v9 signaling (FCMPEq) compare; fccno ignored, operands implicit. */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1499 #endif
1500 
/* Set the FSR ftt field to FSR_FLAGS and raise a TT_FP_EXCP trap. */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);  /* clear old ftt */
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);       /* set new cause */
    gen_exception(dc, TT_FP_EXCP);
}
1507 
/*
 * If the FPU is disabled, raise TT_NFPU_INSN and return 1 so the
 * caller can abandon the insn; return 0 when the FPU is usable.
 * User-mode emulation always treats the FPU as enabled.
 */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1518 
/* Clear the FSR ftt and current-exception (cexc) fields. */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1523 
1524 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1525                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1526 {
1527     TCGv_i32 dst, src;
1528 
1529     src = gen_load_fpr_F(dc, rs);
1530     dst = gen_dest_fpr_F(dc);
1531 
1532     gen(dst, tcg_env, src);
1533     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1534 
1535     gen_store_fpr_F(dc, rd, dst);
1536 }
1537 
1538 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1539                           void (*gen)(TCGv_i32, TCGv_i32))
1540 {
1541     TCGv_i32 dst, src;
1542 
1543     src = gen_load_fpr_F(dc, rs);
1544     dst = gen_dest_fpr_F(dc);
1545 
1546     gen(dst, src);
1547 
1548     gen_store_fpr_F(dc, rd, dst);
1549 }
1550 
1551 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1552                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1553 {
1554     TCGv_i32 dst, src1, src2;
1555 
1556     src1 = gen_load_fpr_F(dc, rs1);
1557     src2 = gen_load_fpr_F(dc, rs2);
1558     dst = gen_dest_fpr_F(dc);
1559 
1560     gen(dst, tcg_env, src1, src2);
1561     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1562 
1563     gen_store_fpr_F(dc, rd, dst);
1564 }
1565 
1566 #ifdef TARGET_SPARC64
1567 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1568                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1569 {
1570     TCGv_i32 dst, src1, src2;
1571 
1572     src1 = gen_load_fpr_F(dc, rs1);
1573     src2 = gen_load_fpr_F(dc, rs2);
1574     dst = gen_dest_fpr_F(dc);
1575 
1576     gen(dst, src1, src2);
1577 
1578     gen_store_fpr_F(dc, rd, dst);
1579 }
1580 #endif
1581 
1582 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1583                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1584 {
1585     TCGv_i64 dst, src;
1586 
1587     src = gen_load_fpr_D(dc, rs);
1588     dst = gen_dest_fpr_D(dc, rd);
1589 
1590     gen(dst, tcg_env, src);
1591     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1592 
1593     gen_store_fpr_D(dc, rd, dst);
1594 }
1595 
1596 #ifdef TARGET_SPARC64
1597 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1598                           void (*gen)(TCGv_i64, TCGv_i64))
1599 {
1600     TCGv_i64 dst, src;
1601 
1602     src = gen_load_fpr_D(dc, rs);
1603     dst = gen_dest_fpr_D(dc, rd);
1604 
1605     gen(dst, src);
1606 
1607     gen_store_fpr_D(dc, rd, dst);
1608 }
1609 #endif
1610 
1611 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1612                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1613 {
1614     TCGv_i64 dst, src1, src2;
1615 
1616     src1 = gen_load_fpr_D(dc, rs1);
1617     src2 = gen_load_fpr_D(dc, rs2);
1618     dst = gen_dest_fpr_D(dc, rd);
1619 
1620     gen(dst, tcg_env, src1, src2);
1621     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1622 
1623     gen_store_fpr_D(dc, rd, dst);
1624 }
1625 
1626 #ifdef TARGET_SPARC64
1627 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1628                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1629 {
1630     TCGv_i64 dst, src1, src2;
1631 
1632     src1 = gen_load_fpr_D(dc, rs1);
1633     src2 = gen_load_fpr_D(dc, rs2);
1634     dst = gen_dest_fpr_D(dc, rd);
1635 
1636     gen(dst, src1, src2);
1637 
1638     gen_store_fpr_D(dc, rd, dst);
1639 }
1640 
1641 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1642                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1643 {
1644     TCGv_i64 dst, src1, src2;
1645 
1646     src1 = gen_load_fpr_D(dc, rs1);
1647     src2 = gen_load_fpr_D(dc, rs2);
1648     dst = gen_dest_fpr_D(dc, rd);
1649 
1650     gen(dst, cpu_gsr, src1, src2);
1651 
1652     gen_store_fpr_D(dc, rd, dst);
1653 }
1654 
1655 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1656                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1657 {
1658     TCGv_i64 dst, src0, src1, src2;
1659 
1660     src1 = gen_load_fpr_D(dc, rs1);
1661     src2 = gen_load_fpr_D(dc, rs2);
1662     src0 = gen_load_fpr_D(dc, rd);
1663     dst = gen_dest_fpr_D(dc, rd);
1664 
1665     gen(dst, src0, src1, src2);
1666 
1667     gen_store_fpr_D(dc, rd, dst);
1668 }
1669 #endif
1670 
/* Quad unary op via the QT staging registers: QT1 <- rs, gen(env),
   rd <- QT0, with IEEE exception check. */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1682 
1683 #ifdef TARGET_SPARC64
/* Quad unary op via the QT staging registers, no exception check. */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1694 #endif
1695 
/* Quad binary op via the QT staging registers: QT0 <- rs1, QT1 <- rs2,
   gen(env), rd <- QT0, with IEEE exception check. */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1708 
1709 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1710                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1711 {
1712     TCGv_i64 dst;
1713     TCGv_i32 src1, src2;
1714 
1715     src1 = gen_load_fpr_F(dc, rs1);
1716     src2 = gen_load_fpr_F(dc, rs2);
1717     dst = gen_dest_fpr_D(dc, rd);
1718 
1719     gen(dst, tcg_env, src1, src2);
1720     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1721 
1722     gen_store_fpr_D(dc, rd, dst);
1723 }
1724 
1725 static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1726                         void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1727 {
1728     TCGv_i64 src1, src2;
1729 
1730     src1 = gen_load_fpr_D(dc, rs1);
1731     src2 = gen_load_fpr_D(dc, rs2);
1732 
1733     gen(tcg_env, src1, src2);
1734     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1735 
1736     gen_op_store_QT0_fpr(QFPREG(rd));
1737     gen_update_fprs_dirty(dc, QFPREG(rd));
1738 }
1739 
1740 #ifdef TARGET_SPARC64
1741 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1742                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1743 {
1744     TCGv_i64 dst;
1745     TCGv_i32 src;
1746 
1747     src = gen_load_fpr_F(dc, rs);
1748     dst = gen_dest_fpr_D(dc, rd);
1749 
1750     gen(dst, tcg_env, src);
1751     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1752 
1753     gen_store_fpr_D(dc, rd, dst);
1754 }
1755 #endif
1756 
1757 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1758                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1759 {
1760     TCGv_i64 dst;
1761     TCGv_i32 src;
1762 
1763     src = gen_load_fpr_F(dc, rs);
1764     dst = gen_dest_fpr_D(dc, rd);
1765 
1766     gen(dst, tcg_env, src);
1767 
1768     gen_store_fpr_D(dc, rd, dst);
1769 }
1770 
1771 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1772                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1773 {
1774     TCGv_i32 dst;
1775     TCGv_i64 src;
1776 
1777     src = gen_load_fpr_D(dc, rs);
1778     dst = gen_dest_fpr_F(dc);
1779 
1780     gen(dst, tcg_env, src);
1781     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1782 
1783     gen_store_fpr_F(dc, rd, dst);
1784 }
1785 
/* rd(single) = gen(env) on the quad staged in QT1, then check IEEE. */
static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1799 
/* rd(double) = gen(env) on the quad staged in QT1, then check IEEE. */
static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1813 
1814 static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1815                           void (*gen)(TCGv_ptr, TCGv_i32))
1816 {
1817     TCGv_i32 src;
1818 
1819     src = gen_load_fpr_F(dc, rs);
1820 
1821     gen(tcg_env, src);
1822 
1823     gen_op_store_QT0_fpr(QFPREG(rd));
1824     gen_update_fprs_dirty(dc, QFPREG(rd));
1825 }
1826 
1827 static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1828                           void (*gen)(TCGv_ptr, TCGv_i64))
1829 {
1830     TCGv_i64 src;
1831 
1832     src = gen_load_fpr_D(dc, rs);
1833 
1834     gen(tcg_env, src);
1835 
1836     gen_op_store_QT0_fpr(QFPREG(rd));
1837     gen_update_fprs_dirty(dc, QFPREG(rd));
1838 }
1839 
/* Atomically exchange SRC with memory at ADDR; old value lands in DST. */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1846 
1847 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1848 {
1849     TCGv m1 = tcg_constant_tl(0xff);
1850     gen_address_mask(dc, addr);
1851     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1852 }
1853 
1854 /* asi moves */
1855 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How an ASI-qualified memory access should be implemented. */
typedef enum {
    GET_ASI_HELPER,   /* fall back to the generic ld_asi/st_asi helpers */
    GET_ASI_EXCP,     /* an exception was already generated; emit nothing */
    GET_ASI_DIRECT,   /* plain qemu_ld/st using mem_idx and memop */
    GET_ASI_DTWINX,   /* twin (128-bit) load; reserved/illegal elsewhere */
    GET_ASI_BLOCK,    /* BLK_* block transfer ASIs */
    GET_ASI_SHORT,    /* FL8/FL16 short FP access (memop forced to 8/16 bit) */
    GET_ASI_BCOPY,    /* sparc32 ASI_M_BCOPY: 32-byte block copy */
    GET_ASI_BFILL,    /* sparc32 ASI_M_BFILL: block fill */
} ASIType;
1866 
/* Decoded ASI information for one memory instruction. */
typedef struct {
    ASIType type;
    int asi;      /* resolved ASI number */
    int mem_idx;  /* MMU index to use for the access */
    MemOp memop;  /* access size/sign, possibly endian-swapped or resized */
} DisasASI;
1873 
/*
 * Decode the ASI of a memory instruction into a DisasASI: the
 * implementation strategy (type), the resolved ASI number, the MMU
 * index to use, and a possibly-adjusted MemOp.  May itself generate
 * an exception, in which case type == GET_ASI_EXCP and the caller
 * must emit nothing further.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: select the MMU index implied by the ASI. */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: select the implementation strategy. */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2086 
/* Emit an ASI-qualified integer load of width MEMOP into DST. */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; nothing to emit. */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Fall back to the generic helper, which handles exotic ASIs. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns i64; narrow it to the 32-bit target. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2121 
/*
 * Emit code for a single-register alternate-space store (STBA, STWA, ...).
 * SRC is the value to store; MEMOP gives the width/endianness before
 * ASI adjustment.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            /* Force 4-byte alignment on both pointers, then copy
               eight 32-bit words.  */
            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        /* Unusual ASIs go through the out-of-line helper. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may raise an exception; sync pc/npc first. */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen the 32-bit TCGv. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2197 
2198 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2199                          TCGv addr, int insn)
2200 {
2201     DisasASI da = get_asi(dc, insn, MO_TEUL);
2202 
2203     switch (da.type) {
2204     case GET_ASI_EXCP:
2205         break;
2206     case GET_ASI_DIRECT:
2207         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2208         break;
2209     default:
2210         /* ??? Should be DAE_invalid_asi.  */
2211         gen_exception(dc, TT_DATA_ACCESS);
2212         break;
2213     }
2214 }
2215 
2216 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2217                         int insn, int rd)
2218 {
2219     DisasASI da = get_asi(dc, insn, MO_TEUL);
2220     TCGv oldv;
2221 
2222     switch (da.type) {
2223     case GET_ASI_EXCP:
2224         return;
2225     case GET_ASI_DIRECT:
2226         oldv = tcg_temp_new();
2227         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2228                                   da.mem_idx, da.memop | MO_ALIGN);
2229         gen_store_gpr(dc, rd, oldv);
2230         break;
2231     default:
2232         /* ??? Should be DAE_invalid_asi.  */
2233         gen_exception(dc, TT_DATA_ACCESS);
2234         break;
2235     }
2236 }
2237 
/*
 * Emit code for LDSTUBA: atomically load the byte at ADDR into DST
 * and store 0xff back, in the alternate space selected by INSN.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The non-atomic load+store below is unsafe with other
               vCPUs running; retry this insn from the main loop.  */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* The helpers may raise exceptions; sync pc/npc first. */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2273 #endif
2274 
2275 #ifdef TARGET_SPARC64
/*
 * Emit code for LDFA/LDDFA/LDQFA: floating-point loads with an explicit
 * ASI.  SIZE is 4, 8 or 16 bytes; RD is the destination fp register.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load into a temp first so that a fault on the second
               half does not leave cpu_fpr[rd/2] clobbered.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* 64-byte block load: eight consecutive doubles.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            /* The helper may raise an exception; sync pc/npc first. */
            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the DIRECT case, defer the first writeback.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2384 
/*
 * Emit code for STFA/STDFA/STQFA: floating-point stores with an explicit
 * ASI.  SIZE is 4, 8 or 16 bytes; RD is the source fp register.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* 64-byte block store: eight consecutive doubles.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for ldfa/lddfa/ldqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2467 
/*
 * Emit code for 64-bit LDDA: load a doubleword pair into %rd (high word)
 * and %rd+1 (low word), honoring the TWINX and little-endian variants.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        return;

    case GET_ASI_DTWINX:
        /* 128-bit atomic-in-spirit twin load; first access checks
           the required 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            /* The helper may raise an exception; sync pc/npc first. */
            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2529 
/*
 * Emit code for 64-bit STDA: store the register pair HI (%rd) and
 * %rd+1 as a doubleword, honoring the TWINX and little-endian variants.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        break;

    case GET_ASI_DTWINX:
        /* Twin store; first access checks 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            /* The helper may raise an exception; sync pc/npc first. */
            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2585 
2586 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2587                          int insn, int rd)
2588 {
2589     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2590     TCGv oldv;
2591 
2592     switch (da.type) {
2593     case GET_ASI_EXCP:
2594         return;
2595     case GET_ASI_DIRECT:
2596         oldv = tcg_temp_new();
2597         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2598                                   da.mem_idx, da.memop | MO_ALIGN);
2599         gen_store_gpr(dc, rd, oldv);
2600         break;
2601     default:
2602         /* ??? Should be DAE_invalid_asi.  */
2603         gen_exception(dc, TT_DATA_ACCESS);
2604         break;
2605     }
2606 }
2607 
2608 #elif !defined(CONFIG_USER_ONLY)
/*
 * Emit code for 32-bit LDDA: load a 64-bit value and split it into the
 * even/odd register pair %rd (high) and %rd|1 (low).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Unusual ASIs go through the out-of-line helper. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception; sync pc/npc first. */
            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit result into the two 32-bit destinations. */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2642 
/*
 * Emit code for 32-bit STDA: store the even/odd register pair HI (%rd)
 * and %rd+1 as one 64-bit value, including the block-fill (BFILL) ASI.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Build the 64-bit value to store: high word from %rd, low from %rd+1. */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi has already raised an exception; emit nothing. */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            /* Replicate the doubleword four times at 8-byte stride. */
            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Unusual ASIs go through the out-of-line helper. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception; sync pc/npc first. */
            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2688 #endif
2689 
2690 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2691 {
2692     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2693     return gen_load_gpr(dc, rs1);
2694 }
2695 
2696 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2697 {
2698     if (IS_IMM) { /* immediate */
2699         target_long simm = GET_FIELDs(insn, 19, 31);
2700         TCGv t = tcg_temp_new();
2701         tcg_gen_movi_tl(t, simm);
2702         return t;
2703     } else {      /* register */
2704         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2705         return gen_load_gpr(dc, rs2);
2706     }
2707 }
2708 
2709 #ifdef TARGET_SPARC64
2710 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2711 {
2712     TCGv_i32 c32, zero, dst, s1, s2;
2713 
2714     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2715        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2716        the later.  */
2717     c32 = tcg_temp_new_i32();
2718     if (cmp->is_bool) {
2719         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2720     } else {
2721         TCGv_i64 c64 = tcg_temp_new_i64();
2722         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2723         tcg_gen_extrl_i64_i32(c32, c64);
2724     }
2725 
2726     s1 = gen_load_fpr_F(dc, rs);
2727     s2 = gen_load_fpr_F(dc, rd);
2728     dst = gen_dest_fpr_F(dc);
2729     zero = tcg_constant_i32(0);
2730 
2731     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2732 
2733     gen_store_fpr_F(dc, rd, dst);
2734 }
2735 
2736 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2737 {
2738     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2739     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2740                         gen_load_fpr_D(dc, rs),
2741                         gen_load_fpr_D(dc, rd));
2742     gen_store_fpr_D(dc, rd, dst);
2743 }
2744 
2745 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2746 {
2747     int qd = QFPREG(rd);
2748     int qs = QFPREG(rs);
2749 
2750     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2751                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2752     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2753                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2754 
2755     gen_update_fprs_dirty(dc, qd);
2756 }
2757 
2758 #ifndef CONFIG_USER_ONLY
/*
 * Emit code that computes, at runtime, a host pointer to the trap-state
 * entry for the current trap level: r_tsptr = &env->ts[env->tl & MAXTL_MASK].
 */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2780 #endif
2781 
2782 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2783                      int width, bool cc, bool left)
2784 {
2785     TCGv lo1, lo2;
2786     uint64_t amask, tabl, tabr;
2787     int shift, imask, omask;
2788 
2789     if (cc) {
2790         tcg_gen_mov_tl(cpu_cc_src, s1);
2791         tcg_gen_mov_tl(cpu_cc_src2, s2);
2792         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2793         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2794         dc->cc_op = CC_OP_SUB;
2795     }
2796 
2797     /* Theory of operation: there are two tables, left and right (not to
2798        be confused with the left and right versions of the opcode).  These
2799        are indexed by the low 3 bits of the inputs.  To make things "easy",
2800        these tables are loaded into two constants, TABL and TABR below.
2801        The operation index = (input & imask) << shift calculates the index
2802        into the constant, while val = (table >> index) & omask calculates
2803        the value we're looking for.  */
2804     switch (width) {
2805     case 8:
2806         imask = 0x7;
2807         shift = 3;
2808         omask = 0xff;
2809         if (left) {
2810             tabl = 0x80c0e0f0f8fcfeffULL;
2811             tabr = 0xff7f3f1f0f070301ULL;
2812         } else {
2813             tabl = 0x0103070f1f3f7fffULL;
2814             tabr = 0xfffefcf8f0e0c080ULL;
2815         }
2816         break;
2817     case 16:
2818         imask = 0x6;
2819         shift = 1;
2820         omask = 0xf;
2821         if (left) {
2822             tabl = 0x8cef;
2823             tabr = 0xf731;
2824         } else {
2825             tabl = 0x137f;
2826             tabr = 0xfec8;
2827         }
2828         break;
2829     case 32:
2830         imask = 0x4;
2831         shift = 0;
2832         omask = 0x3;
2833         if (left) {
2834             tabl = (2 << 2) | 3;
2835             tabr = (3 << 2) | 1;
2836         } else {
2837             tabl = (1 << 2) | 3;
2838             tabr = (3 << 2) | 2;
2839         }
2840         break;
2841     default:
2842         abort();
2843     }
2844 
2845     lo1 = tcg_temp_new();
2846     lo2 = tcg_temp_new();
2847     tcg_gen_andi_tl(lo1, s1, imask);
2848     tcg_gen_andi_tl(lo2, s2, imask);
2849     tcg_gen_shli_tl(lo1, lo1, shift);
2850     tcg_gen_shli_tl(lo2, lo2, shift);
2851 
2852     tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
2853     tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
2854     tcg_gen_andi_tl(lo1, lo1, omask);
2855     tcg_gen_andi_tl(lo2, lo2, omask);
2856 
2857     amask = -8;
2858     if (AM_CHECK(dc)) {
2859         amask &= 0xffffffffULL;
2860     }
2861     tcg_gen_andi_tl(s1, s1, amask);
2862     tcg_gen_andi_tl(s2, s2, amask);
2863 
2864     /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
2865     tcg_gen_and_tl(lo2, lo2, lo1);
2866     tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
2867 }
2868 
2869 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2870 {
2871     TCGv tmp = tcg_temp_new();
2872 
2873     tcg_gen_add_tl(tmp, s1, s2);
2874     tcg_gen_andi_tl(dst, tmp, -8);
2875     if (left) {
2876         tcg_gen_neg_tl(tmp, tmp);
2877     }
2878     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2879 }
2880 
/*
 * Emit FALIGNDATA: concatenate s1:s2 and extract 8 bytes starting at
 * the byte offset held in GSR.align, i.e.
 * dst = (s1 << (align*8)) | (s2 >> (64 - align*8)).
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8 (bit count). */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2901 #endif
2902 
2903 /* Include the auto-generated decoder.  */
2904 #include "decode-insns.c.inc"
2905 
2906 #define TRANS(NAME, AVAIL, FUNC, ...) \
2907     static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
2908     { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }
2909 
2910 #define avail_ALL(C)      true
2911 #ifdef TARGET_SPARC64
2912 # define avail_32(C)      false
2913 # define avail_64(C)      true
2914 #else
2915 # define avail_32(C)      true
2916 # define avail_64(C)      false
2917 #endif
2918 
2919 /* Default case for non jump instructions. */
2920 static bool advance_pc(DisasContext *dc)
2921 {
2922     if (dc->npc & 3) {
2923         switch (dc->npc) {
2924         case DYNAMIC_PC:
2925         case DYNAMIC_PC_LOOKUP:
2926             dc->pc = dc->npc;
2927             gen_op_next_insn();
2928             break;
2929         case JUMP_PC:
2930             /* we can do a static jump */
2931             gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
2932             dc->base.is_jmp = DISAS_NORETURN;
2933             break;
2934         default:
2935             g_assert_not_reached();
2936         }
2937     } else {
2938         dc->pc = dc->npc;
2939         dc->npc = dc->npc + 4;
2940     }
2941     return true;
2942 }
2943 
2944 static bool advance_jump_uncond_never(DisasContext *dc, bool annul)
2945 {
2946     if (annul) {
2947         dc->pc = dc->npc + 4;
2948         dc->npc = dc->pc + 4;
2949     } else {
2950         dc->pc = dc->npc;
2951         dc->npc = dc->pc + 4;
2952     }
2953     return true;
2954 }
2955 
2956 static bool advance_jump_uncond_always(DisasContext *dc, bool annul,
2957                                        target_ulong dest)
2958 {
2959     if (annul) {
2960         dc->pc = dest;
2961         dc->npc = dest + 4;
2962     } else {
2963         dc->pc = dc->npc;
2964         dc->npc = dest;
2965         tcg_gen_mov_tl(cpu_pc, cpu_npc);
2966     }
2967     return true;
2968 }
2969 
2970 static bool advance_jump_cond(DisasContext *dc, bool annul, target_ulong dest)
2971 {
2972     if (annul) {
2973         gen_branch_a(dc, dest);
2974     } else {
2975         gen_branch_n(dc, dest);
2976     }
2977     return true;
2978 }
2979 
2980 static bool do_bpcc(DisasContext *dc, arg_bcc *a)
2981 {
2982     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
2983 
2984     switch (a->cond) {
2985     case 0x0:
2986         return advance_jump_uncond_never(dc, a->a);
2987     case 0x8:
2988         return advance_jump_uncond_always(dc, a->a, target);
2989     default:
2990         flush_cond(dc);
2991         gen_cond(cpu_cond, a->cc, a->cond, dc);
2992         return advance_jump_cond(dc, a->a, target);
2993     }
2994 }
2995 
2996 TRANS(Bicc, ALL, do_bpcc, a)
2997 TRANS(BPcc,  64, do_bpcc, a)
2998 
2999 static bool do_fbpfcc(DisasContext *dc, arg_bcc *a)
3000 {
3001     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
3002 
3003     if (gen_trap_ifnofpu(dc)) {
3004         return true;
3005     }
3006     switch (a->cond) {
3007     case 0x0:
3008         return advance_jump_uncond_never(dc, a->a);
3009     case 0x8:
3010         return advance_jump_uncond_always(dc, a->a, target);
3011     default:
3012         flush_cond(dc);
3013         gen_fcond(cpu_cond, a->cc, a->cond);
3014         return advance_jump_cond(dc, a->a, target);
3015     }
3016 }
3017 
3018 TRANS(FBPfcc,  64, do_fbpfcc, a)
3019 TRANS(FBfcc,  ALL, do_fbpfcc, a)
3020 
/*
 * V9 BPr: branch on the contents of an integer register.
 * Returns false (decode failure) on sparc32 or for reserved conditions.
 */
static bool trans_BPr(DisasContext *dc, arg_BPr *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);
    DisasCompare cmp;

    if (!avail_64(dc)) {
        return false;
    }
    if (gen_tcg_cond_reg[a->cond] == TCG_COND_NEVER) {
        /* Reserved condition encoding. */
        return false;
    }

    flush_cond(dc);
    gen_compare_reg(&cmp, a->cond, gen_load_gpr(dc, a->rs1));
    tcg_gen_setcond_tl(cmp.cond, cpu_cond, cmp.c1, cmp.c2);
    return advance_jump_cond(dc, a->a, target);
}
3038 
/*
 * CALL: save the return address in %o7 (r15) and perform a
 * pc-relative delayed jump to the target.
 */
static bool trans_CALL(DisasContext *dc, arg_CALL *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);

    gen_store_gpr(dc, 15, tcg_constant_tl(dc->pc));
    gen_mov_pc_npc(dc);
    dc->npc = target;
    return true;
}
3048 
/* Coprocessor instructions: no coprocessor is implemented. */
static bool trans_NCP(DisasContext *dc, arg_NCP *a)
{
    /*
     * For sparc32, always generate the no-coprocessor exception.
     * For sparc64, always generate illegal instruction.
     */
#ifdef TARGET_SPARC64
    /* Returning false makes the decoder raise the illegal-insn trap. */
    return false;
#else
    gen_exception(dc, TT_NCP_INSN);
    return true;
#endif
}
3062 
/*
 * Bail out of the legacy decoder when the CPU model lacks a feature:
 * CHECK_IU_FEATURE jumps to the illegal_insn label, CHECK_FPU_FEATURE
 * to the nfpu_insn label.  Wrapped in do { } while (0) so each use
 * expands to exactly one statement: the previous bare
 * "if (...) goto ...;" form left a stray empty statement after the
 * caller's semicolon and could mis-bind in a dangling-else context.
 * (goto out of a do/while is well-defined, so behavior is unchanged.)
 */
#define CHECK_IU_FEATURE(dc, FEATURE)                              \
    do {                                                           \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) {    \
            goto illegal_insn;                                     \
        }                                                          \
    } while (0)
#define CHECK_FPU_FEATURE(dc, FEATURE)                             \
    do {                                                           \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) {    \
            goto nfpu_insn;                                        \
        }                                                          \
    } while (0)
3069 
3070 /* before an instruction, dc->pc must be static */
3071 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3072 {
3073     unsigned int opc, rs1, rs2, rd;
3074     TCGv cpu_src1, cpu_src2;
3075     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3076     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3077     target_long simm;
3078 
3079     opc = GET_FIELD(insn, 0, 1);
3080     rd = GET_FIELD(insn, 2, 6);
3081 
3082     switch (opc) {
3083     case 0:                     /* branches/sethi */
3084         {
3085             unsigned int xop = GET_FIELD(insn, 7, 9);
3086             switch (xop) {
3087 #ifdef TARGET_SPARC64
3088             case 0x1:           /* V9 BPcc */
3089                 g_assert_not_reached(); /* in decodetree */
3090             case 0x3:           /* V9 BPr */
3091                 g_assert_not_reached(); /* in decodetree */
3092             case 0x5:           /* V9 FBPcc */
3093                 g_assert_not_reached(); /* in decodetree */
3094 #else
3095             case 0x7:           /* CBN+x */
3096                 g_assert_not_reached(); /* in decodetree */
3097 #endif
3098             case 0x2:           /* BN+x */
3099                 g_assert_not_reached(); /* in decodetree */
3100             case 0x6:           /* FBN+x */
3101                 g_assert_not_reached(); /* in decodetree */
3102             case 0x4:           /* SETHI */
3103                 /* Special-case %g0 because that's the canonical nop.  */
3104                 if (rd) {
3105                     uint32_t value = GET_FIELD(insn, 10, 31);
3106                     TCGv t = gen_dest_gpr(dc, rd);
3107                     tcg_gen_movi_tl(t, value << 10);
3108                     gen_store_gpr(dc, rd, t);
3109                 }
3110                 break;
3111             case 0x0:           /* UNIMPL */
3112             default:
3113                 goto illegal_insn;
3114             }
3115             break;
3116         }
3117         break;
3118     case 1:
3119         g_assert_not_reached(); /* in decodetree */
3120     case 2:                     /* FPU & Logical Operations */
3121         {
3122             unsigned int xop = GET_FIELD(insn, 7, 12);
3123             TCGv cpu_dst = tcg_temp_new();
3124             TCGv cpu_tmp0;
3125 
3126             if (xop == 0x3a) {  /* generate trap */
3127                 int cond = GET_FIELD(insn, 3, 6);
3128                 TCGv_i32 trap;
3129                 TCGLabel *l1 = NULL;
3130                 int mask;
3131 
3132                 if (cond == 0) {
3133                     /* Trap never.  */
3134                     break;
3135                 }
3136 
3137                 save_state(dc);
3138 
3139                 if (cond != 8) {
3140                     /* Conditional trap.  */
3141                     DisasCompare cmp;
3142 #ifdef TARGET_SPARC64
3143                     /* V9 icc/xcc */
3144                     int cc = GET_FIELD_SP(insn, 11, 12);
3145                     if (cc == 0) {
3146                         gen_compare(&cmp, 0, cond, dc);
3147                     } else if (cc == 2) {
3148                         gen_compare(&cmp, 1, cond, dc);
3149                     } else {
3150                         goto illegal_insn;
3151                     }
3152 #else
3153                     gen_compare(&cmp, 0, cond, dc);
3154 #endif
3155                     l1 = gen_new_label();
3156                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3157                                       cmp.c1, cmp.c2, l1);
3158                 }
3159 
3160                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3161                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3162 
3163                 /* Don't use the normal temporaries, as they may well have
3164                    gone out of scope with the branch above.  While we're
3165                    doing that we might as well pre-truncate to 32-bit.  */
3166                 trap = tcg_temp_new_i32();
3167 
3168                 rs1 = GET_FIELD_SP(insn, 14, 18);
3169                 if (IS_IMM) {
3170                     rs2 = GET_FIELD_SP(insn, 0, 7);
3171                     if (rs1 == 0) {
3172                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3173                         /* Signal that the trap value is fully constant.  */
3174                         mask = 0;
3175                     } else {
3176                         TCGv t1 = gen_load_gpr(dc, rs1);
3177                         tcg_gen_trunc_tl_i32(trap, t1);
3178                         tcg_gen_addi_i32(trap, trap, rs2);
3179                     }
3180                 } else {
3181                     TCGv t1, t2;
3182                     rs2 = GET_FIELD_SP(insn, 0, 4);
3183                     t1 = gen_load_gpr(dc, rs1);
3184                     t2 = gen_load_gpr(dc, rs2);
3185                     tcg_gen_add_tl(t1, t1, t2);
3186                     tcg_gen_trunc_tl_i32(trap, t1);
3187                 }
3188                 if (mask != 0) {
3189                     tcg_gen_andi_i32(trap, trap, mask);
3190                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3191                 }
3192 
3193                 gen_helper_raise_exception(tcg_env, trap);
3194 
3195                 if (cond == 8) {
3196                     /* An unconditional trap ends the TB.  */
3197                     dc->base.is_jmp = DISAS_NORETURN;
3198                     goto jmp_insn;
3199                 } else {
3200                     /* A conditional trap falls through to the next insn.  */
3201                     gen_set_label(l1);
3202                     break;
3203                 }
3204             } else if (xop == 0x28) {
3205                 rs1 = GET_FIELD(insn, 13, 17);
3206                 switch(rs1) {
3207                 case 0: /* rdy */
3208 #ifndef TARGET_SPARC64
3209                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3210                                        manual, rdy on the microSPARC
3211                                        II */
3212                 case 0x0f:          /* stbar in the SPARCv8 manual,
3213                                        rdy on the microSPARC II */
3214                 case 0x10 ... 0x1f: /* implementation-dependent in the
3215                                        SPARCv8 manual, rdy on the
3216                                        microSPARC II */
3217                     /* Read Asr17 */
3218                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3219                         TCGv t = gen_dest_gpr(dc, rd);
3220                         /* Read Asr17 for a Leon3 monoprocessor */
3221                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3222                         gen_store_gpr(dc, rd, t);
3223                         break;
3224                     }
3225 #endif
3226                     gen_store_gpr(dc, rd, cpu_y);
3227                     break;
3228 #ifdef TARGET_SPARC64
3229                 case 0x2: /* V9 rdccr */
3230                     update_psr(dc);
3231                     gen_helper_rdccr(cpu_dst, tcg_env);
3232                     gen_store_gpr(dc, rd, cpu_dst);
3233                     break;
3234                 case 0x3: /* V9 rdasi */
3235                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3236                     gen_store_gpr(dc, rd, cpu_dst);
3237                     break;
3238                 case 0x4: /* V9 rdtick */
3239                     {
3240                         TCGv_ptr r_tickptr;
3241                         TCGv_i32 r_const;
3242 
3243                         r_tickptr = tcg_temp_new_ptr();
3244                         r_const = tcg_constant_i32(dc->mem_idx);
3245                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3246                                        offsetof(CPUSPARCState, tick));
3247                         if (translator_io_start(&dc->base)) {
3248                             dc->base.is_jmp = DISAS_EXIT;
3249                         }
3250                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3251                                                   r_const);
3252                         gen_store_gpr(dc, rd, cpu_dst);
3253                     }
3254                     break;
3255                 case 0x5: /* V9 rdpc */
3256                     {
3257                         TCGv t = gen_dest_gpr(dc, rd);
3258                         if (unlikely(AM_CHECK(dc))) {
3259                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3260                         } else {
3261                             tcg_gen_movi_tl(t, dc->pc);
3262                         }
3263                         gen_store_gpr(dc, rd, t);
3264                     }
3265                     break;
3266                 case 0x6: /* V9 rdfprs */
3267                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3268                     gen_store_gpr(dc, rd, cpu_dst);
3269                     break;
3270                 case 0xf: /* V9 membar */
3271                     break; /* no effect */
3272                 case 0x13: /* Graphics Status */
3273                     if (gen_trap_ifnofpu(dc)) {
3274                         goto jmp_insn;
3275                     }
3276                     gen_store_gpr(dc, rd, cpu_gsr);
3277                     break;
3278                 case 0x16: /* Softint */
3279                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3280                                      offsetof(CPUSPARCState, softint));
3281                     gen_store_gpr(dc, rd, cpu_dst);
3282                     break;
3283                 case 0x17: /* Tick compare */
3284                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3285                     break;
3286                 case 0x18: /* System tick */
3287                     {
3288                         TCGv_ptr r_tickptr;
3289                         TCGv_i32 r_const;
3290 
3291                         r_tickptr = tcg_temp_new_ptr();
3292                         r_const = tcg_constant_i32(dc->mem_idx);
3293                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3294                                        offsetof(CPUSPARCState, stick));
3295                         if (translator_io_start(&dc->base)) {
3296                             dc->base.is_jmp = DISAS_EXIT;
3297                         }
3298                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3299                                                   r_const);
3300                         gen_store_gpr(dc, rd, cpu_dst);
3301                     }
3302                     break;
3303                 case 0x19: /* System tick compare */
3304                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3305                     break;
3306                 case 0x1a: /* UltraSPARC-T1 Strand status */
3307                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3308                      * this ASR as impl. dep
3309                      */
3310                     CHECK_IU_FEATURE(dc, HYPV);
3311                     {
3312                         TCGv t = gen_dest_gpr(dc, rd);
3313                         tcg_gen_movi_tl(t, 1UL);
3314                         gen_store_gpr(dc, rd, t);
3315                     }
3316                     break;
3317                 case 0x10: /* Performance Control */
3318                 case 0x11: /* Performance Instrumentation Counter */
3319                 case 0x12: /* Dispatch Control */
3320                 case 0x14: /* Softint set, WO */
3321                 case 0x15: /* Softint clear, WO */
3322 #endif
3323                 default:
3324                     goto illegal_insn;
3325                 }
3326 #if !defined(CONFIG_USER_ONLY)
3327             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3328 #ifndef TARGET_SPARC64
3329                 if (!supervisor(dc)) {
3330                     goto priv_insn;
3331                 }
3332                 update_psr(dc);
3333                 gen_helper_rdpsr(cpu_dst, tcg_env);
3334 #else
3335                 CHECK_IU_FEATURE(dc, HYPV);
3336                 if (!hypervisor(dc))
3337                     goto priv_insn;
3338                 rs1 = GET_FIELD(insn, 13, 17);
3339                 switch (rs1) {
3340                 case 0: // hpstate
3341                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3342                                    offsetof(CPUSPARCState, hpstate));
3343                     break;
3344                 case 1: // htstate
3345                     // gen_op_rdhtstate();
3346                     break;
3347                 case 3: // hintp
3348                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3349                     break;
3350                 case 5: // htba
3351                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3352                     break;
3353                 case 6: // hver
3354                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3355                     break;
3356                 case 31: // hstick_cmpr
3357                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3358                     break;
3359                 default:
3360                     goto illegal_insn;
3361                 }
3362 #endif
3363                 gen_store_gpr(dc, rd, cpu_dst);
3364                 break;
3365             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3366                 if (!supervisor(dc)) {
3367                     goto priv_insn;
3368                 }
3369                 cpu_tmp0 = tcg_temp_new();
3370 #ifdef TARGET_SPARC64
3371                 rs1 = GET_FIELD(insn, 13, 17);
3372                 switch (rs1) {
3373                 case 0: // tpc
3374                     {
3375                         TCGv_ptr r_tsptr;
3376 
3377                         r_tsptr = tcg_temp_new_ptr();
3378                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3379                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3380                                       offsetof(trap_state, tpc));
3381                     }
3382                     break;
3383                 case 1: // tnpc
3384                     {
3385                         TCGv_ptr r_tsptr;
3386 
3387                         r_tsptr = tcg_temp_new_ptr();
3388                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3389                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3390                                       offsetof(trap_state, tnpc));
3391                     }
3392                     break;
3393                 case 2: // tstate
3394                     {
3395                         TCGv_ptr r_tsptr;
3396 
3397                         r_tsptr = tcg_temp_new_ptr();
3398                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3399                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3400                                       offsetof(trap_state, tstate));
3401                     }
3402                     break;
3403                 case 3: // tt
3404                     {
3405                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3406 
3407                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3408                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3409                                          offsetof(trap_state, tt));
3410                     }
3411                     break;
3412                 case 4: // tick
3413                     {
3414                         TCGv_ptr r_tickptr;
3415                         TCGv_i32 r_const;
3416 
3417                         r_tickptr = tcg_temp_new_ptr();
3418                         r_const = tcg_constant_i32(dc->mem_idx);
3419                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3420                                        offsetof(CPUSPARCState, tick));
3421                         if (translator_io_start(&dc->base)) {
3422                             dc->base.is_jmp = DISAS_EXIT;
3423                         }
3424                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3425                                                   r_tickptr, r_const);
3426                     }
3427                     break;
3428                 case 5: // tba
3429                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3430                     break;
3431                 case 6: // pstate
3432                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3433                                      offsetof(CPUSPARCState, pstate));
3434                     break;
3435                 case 7: // tl
3436                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3437                                      offsetof(CPUSPARCState, tl));
3438                     break;
3439                 case 8: // pil
3440                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3441                                      offsetof(CPUSPARCState, psrpil));
3442                     break;
3443                 case 9: // cwp
3444                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3445                     break;
3446                 case 10: // cansave
3447                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3448                                      offsetof(CPUSPARCState, cansave));
3449                     break;
3450                 case 11: // canrestore
3451                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3452                                      offsetof(CPUSPARCState, canrestore));
3453                     break;
3454                 case 12: // cleanwin
3455                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3456                                      offsetof(CPUSPARCState, cleanwin));
3457                     break;
3458                 case 13: // otherwin
3459                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3460                                      offsetof(CPUSPARCState, otherwin));
3461                     break;
3462                 case 14: // wstate
3463                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3464                                      offsetof(CPUSPARCState, wstate));
3465                     break;
3466                 case 16: // UA2005 gl
3467                     CHECK_IU_FEATURE(dc, GL);
3468                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3469                                      offsetof(CPUSPARCState, gl));
3470                     break;
3471                 case 26: // UA2005 strand status
3472                     CHECK_IU_FEATURE(dc, HYPV);
3473                     if (!hypervisor(dc))
3474                         goto priv_insn;
3475                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3476                     break;
3477                 case 31: // ver
3478                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3479                     break;
3480                 case 15: // fq
3481                 default:
3482                     goto illegal_insn;
3483                 }
3484 #else
3485                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3486 #endif
3487                 gen_store_gpr(dc, rd, cpu_tmp0);
3488                 break;
3489 #endif
3490 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3491             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3492 #ifdef TARGET_SPARC64
3493                 gen_helper_flushw(tcg_env);
3494 #else
3495                 if (!supervisor(dc))
3496                     goto priv_insn;
3497                 gen_store_gpr(dc, rd, cpu_tbr);
3498 #endif
3499                 break;
3500 #endif
3501             } else if (xop == 0x34) {   /* FPU Operations */
3502                 if (gen_trap_ifnofpu(dc)) {
3503                     goto jmp_insn;
3504                 }
3505                 gen_op_clear_ieee_excp_and_FTT();
3506                 rs1 = GET_FIELD(insn, 13, 17);
3507                 rs2 = GET_FIELD(insn, 27, 31);
3508                 xop = GET_FIELD(insn, 18, 26);
3509 
3510                 switch (xop) {
3511                 case 0x1: /* fmovs */
3512                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3513                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3514                     break;
3515                 case 0x5: /* fnegs */
3516                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3517                     break;
3518                 case 0x9: /* fabss */
3519                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3520                     break;
3521                 case 0x29: /* fsqrts */
3522                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3523                     break;
3524                 case 0x2a: /* fsqrtd */
3525                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3526                     break;
3527                 case 0x2b: /* fsqrtq */
3528                     CHECK_FPU_FEATURE(dc, FLOAT128);
3529                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3530                     break;
3531                 case 0x41: /* fadds */
3532                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3533                     break;
3534                 case 0x42: /* faddd */
3535                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3536                     break;
3537                 case 0x43: /* faddq */
3538                     CHECK_FPU_FEATURE(dc, FLOAT128);
3539                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3540                     break;
3541                 case 0x45: /* fsubs */
3542                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3543                     break;
3544                 case 0x46: /* fsubd */
3545                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3546                     break;
3547                 case 0x47: /* fsubq */
3548                     CHECK_FPU_FEATURE(dc, FLOAT128);
3549                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3550                     break;
3551                 case 0x49: /* fmuls */
3552                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3553                     break;
3554                 case 0x4a: /* fmuld */
3555                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3556                     break;
3557                 case 0x4b: /* fmulq */
3558                     CHECK_FPU_FEATURE(dc, FLOAT128);
3559                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3560                     break;
3561                 case 0x4d: /* fdivs */
3562                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3563                     break;
3564                 case 0x4e: /* fdivd */
3565                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3566                     break;
3567                 case 0x4f: /* fdivq */
3568                     CHECK_FPU_FEATURE(dc, FLOAT128);
3569                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3570                     break;
3571                 case 0x69: /* fsmuld */
3572                     CHECK_FPU_FEATURE(dc, FSMULD);
3573                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3574                     break;
3575                 case 0x6e: /* fdmulq */
3576                     CHECK_FPU_FEATURE(dc, FLOAT128);
3577                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3578                     break;
3579                 case 0xc4: /* fitos */
3580                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3581                     break;
3582                 case 0xc6: /* fdtos */
3583                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3584                     break;
3585                 case 0xc7: /* fqtos */
3586                     CHECK_FPU_FEATURE(dc, FLOAT128);
3587                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3588                     break;
3589                 case 0xc8: /* fitod */
3590                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3591                     break;
3592                 case 0xc9: /* fstod */
3593                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3594                     break;
3595                 case 0xcb: /* fqtod */
3596                     CHECK_FPU_FEATURE(dc, FLOAT128);
3597                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3598                     break;
3599                 case 0xcc: /* fitoq */
3600                     CHECK_FPU_FEATURE(dc, FLOAT128);
3601                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3602                     break;
3603                 case 0xcd: /* fstoq */
3604                     CHECK_FPU_FEATURE(dc, FLOAT128);
3605                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3606                     break;
3607                 case 0xce: /* fdtoq */
3608                     CHECK_FPU_FEATURE(dc, FLOAT128);
3609                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3610                     break;
3611                 case 0xd1: /* fstoi */
3612                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3613                     break;
3614                 case 0xd2: /* fdtoi */
3615                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3616                     break;
3617                 case 0xd3: /* fqtoi */
3618                     CHECK_FPU_FEATURE(dc, FLOAT128);
3619                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3620                     break;
3621 #ifdef TARGET_SPARC64
3622                 case 0x2: /* V9 fmovd */
3623                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3624                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3625                     break;
3626                 case 0x3: /* V9 fmovq */
3627                     CHECK_FPU_FEATURE(dc, FLOAT128);
3628                     gen_move_Q(dc, rd, rs2);
3629                     break;
3630                 case 0x6: /* V9 fnegd */
3631                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3632                     break;
3633                 case 0x7: /* V9 fnegq */
3634                     CHECK_FPU_FEATURE(dc, FLOAT128);
3635                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3636                     break;
3637                 case 0xa: /* V9 fabsd */
3638                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3639                     break;
3640                 case 0xb: /* V9 fabsq */
3641                     CHECK_FPU_FEATURE(dc, FLOAT128);
3642                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3643                     break;
3644                 case 0x81: /* V9 fstox */
3645                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3646                     break;
3647                 case 0x82: /* V9 fdtox */
3648                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3649                     break;
3650                 case 0x83: /* V9 fqtox */
3651                     CHECK_FPU_FEATURE(dc, FLOAT128);
3652                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3653                     break;
3654                 case 0x84: /* V9 fxtos */
3655                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3656                     break;
3657                 case 0x88: /* V9 fxtod */
3658                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3659                     break;
3660                 case 0x8c: /* V9 fxtoq */
3661                     CHECK_FPU_FEATURE(dc, FLOAT128);
3662                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3663                     break;
3664 #endif
3665                 default:
3666                     goto illegal_insn;
3667                 }
3668             } else if (xop == 0x35) {   /* FPU Operations */
3669 #ifdef TARGET_SPARC64
3670                 int cond;
3671 #endif
3672                 if (gen_trap_ifnofpu(dc)) {
3673                     goto jmp_insn;
3674                 }
3675                 gen_op_clear_ieee_excp_and_FTT();
3676                 rs1 = GET_FIELD(insn, 13, 17);
3677                 rs2 = GET_FIELD(insn, 27, 31);
3678                 xop = GET_FIELD(insn, 18, 26);
3679 
3680 #ifdef TARGET_SPARC64
3681 #define FMOVR(sz)                                                  \
3682                 do {                                               \
3683                     DisasCompare cmp;                              \
3684                     cond = GET_FIELD_SP(insn, 10, 12);             \
3685                     cpu_src1 = get_src1(dc, insn);                 \
3686                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3687                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3688                 } while (0)
3689 
3690                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3691                     FMOVR(s);
3692                     break;
3693                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3694                     FMOVR(d);
3695                     break;
3696                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3697                     CHECK_FPU_FEATURE(dc, FLOAT128);
3698                     FMOVR(q);
3699                     break;
3700                 }
3701 #undef FMOVR
3702 #endif
3703                 switch (xop) {
3704 #ifdef TARGET_SPARC64
3705 #define FMOVCC(fcc, sz)                                                 \
3706                     do {                                                \
3707                         DisasCompare cmp;                               \
3708                         cond = GET_FIELD_SP(insn, 14, 17);              \
3709                         gen_fcompare(&cmp, fcc, cond);                  \
3710                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3711                     } while (0)
3712 
3713                     case 0x001: /* V9 fmovscc %fcc0 */
3714                         FMOVCC(0, s);
3715                         break;
3716                     case 0x002: /* V9 fmovdcc %fcc0 */
3717                         FMOVCC(0, d);
3718                         break;
3719                     case 0x003: /* V9 fmovqcc %fcc0 */
3720                         CHECK_FPU_FEATURE(dc, FLOAT128);
3721                         FMOVCC(0, q);
3722                         break;
3723                     case 0x041: /* V9 fmovscc %fcc1 */
3724                         FMOVCC(1, s);
3725                         break;
3726                     case 0x042: /* V9 fmovdcc %fcc1 */
3727                         FMOVCC(1, d);
3728                         break;
3729                     case 0x043: /* V9 fmovqcc %fcc1 */
3730                         CHECK_FPU_FEATURE(dc, FLOAT128);
3731                         FMOVCC(1, q);
3732                         break;
3733                     case 0x081: /* V9 fmovscc %fcc2 */
3734                         FMOVCC(2, s);
3735                         break;
3736                     case 0x082: /* V9 fmovdcc %fcc2 */
3737                         FMOVCC(2, d);
3738                         break;
3739                     case 0x083: /* V9 fmovqcc %fcc2 */
3740                         CHECK_FPU_FEATURE(dc, FLOAT128);
3741                         FMOVCC(2, q);
3742                         break;
3743                     case 0x0c1: /* V9 fmovscc %fcc3 */
3744                         FMOVCC(3, s);
3745                         break;
3746                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3747                         FMOVCC(3, d);
3748                         break;
3749                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3750                         CHECK_FPU_FEATURE(dc, FLOAT128);
3751                         FMOVCC(3, q);
3752                         break;
3753 #undef FMOVCC
3754 #define FMOVCC(xcc, sz)                                                 \
3755                     do {                                                \
3756                         DisasCompare cmp;                               \
3757                         cond = GET_FIELD_SP(insn, 14, 17);              \
3758                         gen_compare(&cmp, xcc, cond, dc);               \
3759                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3760                     } while (0)
3761 
3762                     case 0x101: /* V9 fmovscc %icc */
3763                         FMOVCC(0, s);
3764                         break;
3765                     case 0x102: /* V9 fmovdcc %icc */
3766                         FMOVCC(0, d);
3767                         break;
3768                     case 0x103: /* V9 fmovqcc %icc */
3769                         CHECK_FPU_FEATURE(dc, FLOAT128);
3770                         FMOVCC(0, q);
3771                         break;
3772                     case 0x181: /* V9 fmovscc %xcc */
3773                         FMOVCC(1, s);
3774                         break;
3775                     case 0x182: /* V9 fmovdcc %xcc */
3776                         FMOVCC(1, d);
3777                         break;
3778                     case 0x183: /* V9 fmovqcc %xcc */
3779                         CHECK_FPU_FEATURE(dc, FLOAT128);
3780                         FMOVCC(1, q);
3781                         break;
3782 #undef FMOVCC
3783 #endif
3784                     case 0x51: /* fcmps, V9 %fcc */
3785                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3786                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3787                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3788                         break;
3789                     case 0x52: /* fcmpd, V9 %fcc */
3790                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3791                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3792                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3793                         break;
3794                     case 0x53: /* fcmpq, V9 %fcc */
3795                         CHECK_FPU_FEATURE(dc, FLOAT128);
3796                         gen_op_load_fpr_QT0(QFPREG(rs1));
3797                         gen_op_load_fpr_QT1(QFPREG(rs2));
3798                         gen_op_fcmpq(rd & 3);
3799                         break;
3800                     case 0x55: /* fcmpes, V9 %fcc */
3801                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3802                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3803                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3804                         break;
3805                     case 0x56: /* fcmped, V9 %fcc */
3806                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3807                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3808                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3809                         break;
3810                     case 0x57: /* fcmpeq, V9 %fcc */
3811                         CHECK_FPU_FEATURE(dc, FLOAT128);
3812                         gen_op_load_fpr_QT0(QFPREG(rs1));
3813                         gen_op_load_fpr_QT1(QFPREG(rs2));
3814                         gen_op_fcmpeq(rd & 3);
3815                         break;
3816                     default:
3817                         goto illegal_insn;
3818                 }
3819             } else if (xop == 0x2) {
3820                 TCGv dst = gen_dest_gpr(dc, rd);
3821                 rs1 = GET_FIELD(insn, 13, 17);
3822                 if (rs1 == 0) {
3823                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3824                     if (IS_IMM) {       /* immediate */
3825                         simm = GET_FIELDs(insn, 19, 31);
3826                         tcg_gen_movi_tl(dst, simm);
3827                         gen_store_gpr(dc, rd, dst);
3828                     } else {            /* register */
3829                         rs2 = GET_FIELD(insn, 27, 31);
3830                         if (rs2 == 0) {
3831                             tcg_gen_movi_tl(dst, 0);
3832                             gen_store_gpr(dc, rd, dst);
3833                         } else {
3834                             cpu_src2 = gen_load_gpr(dc, rs2);
3835                             gen_store_gpr(dc, rd, cpu_src2);
3836                         }
3837                     }
3838                 } else {
3839                     cpu_src1 = get_src1(dc, insn);
3840                     if (IS_IMM) {       /* immediate */
3841                         simm = GET_FIELDs(insn, 19, 31);
3842                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3843                         gen_store_gpr(dc, rd, dst);
3844                     } else {            /* register */
3845                         rs2 = GET_FIELD(insn, 27, 31);
3846                         if (rs2 == 0) {
3847                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3848                             gen_store_gpr(dc, rd, cpu_src1);
3849                         } else {
3850                             cpu_src2 = gen_load_gpr(dc, rs2);
3851                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3852                             gen_store_gpr(dc, rd, dst);
3853                         }
3854                     }
3855                 }
3856 #ifdef TARGET_SPARC64
3857             } else if (xop == 0x25) { /* sll, V9 sllx */
3858                 cpu_src1 = get_src1(dc, insn);
3859                 if (IS_IMM) {   /* immediate */
3860                     simm = GET_FIELDs(insn, 20, 31);
3861                     if (insn & (1 << 12)) {
3862                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3863                     } else {
3864                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3865                     }
3866                 } else {                /* register */
3867                     rs2 = GET_FIELD(insn, 27, 31);
3868                     cpu_src2 = gen_load_gpr(dc, rs2);
3869                     cpu_tmp0 = tcg_temp_new();
3870                     if (insn & (1 << 12)) {
3871                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3872                     } else {
3873                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3874                     }
3875                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3876                 }
3877                 gen_store_gpr(dc, rd, cpu_dst);
3878             } else if (xop == 0x26) { /* srl, V9 srlx */
3879                 cpu_src1 = get_src1(dc, insn);
3880                 if (IS_IMM) {   /* immediate */
3881                     simm = GET_FIELDs(insn, 20, 31);
3882                     if (insn & (1 << 12)) {
3883                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3884                     } else {
3885                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3886                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3887                     }
3888                 } else {                /* register */
3889                     rs2 = GET_FIELD(insn, 27, 31);
3890                     cpu_src2 = gen_load_gpr(dc, rs2);
3891                     cpu_tmp0 = tcg_temp_new();
3892                     if (insn & (1 << 12)) {
3893                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3894                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3895                     } else {
3896                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3897                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3898                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3899                     }
3900                 }
3901                 gen_store_gpr(dc, rd, cpu_dst);
3902             } else if (xop == 0x27) { /* sra, V9 srax */
3903                 cpu_src1 = get_src1(dc, insn);
3904                 if (IS_IMM) {   /* immediate */
3905                     simm = GET_FIELDs(insn, 20, 31);
3906                     if (insn & (1 << 12)) {
3907                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3908                     } else {
3909                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3910                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3911                     }
3912                 } else {                /* register */
3913                     rs2 = GET_FIELD(insn, 27, 31);
3914                     cpu_src2 = gen_load_gpr(dc, rs2);
3915                     cpu_tmp0 = tcg_temp_new();
3916                     if (insn & (1 << 12)) {
3917                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3918                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3919                     } else {
3920                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3921                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3922                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3923                     }
3924                 }
3925                 gen_store_gpr(dc, rd, cpu_dst);
3926 #endif
3927             } else if (xop < 0x36) {
3928                 if (xop < 0x20) {
3929                     cpu_src1 = get_src1(dc, insn);
3930                     cpu_src2 = get_src2(dc, insn);
3931                     switch (xop & ~0x10) {
3932                     case 0x0: /* add */
3933                         if (xop & 0x10) {
3934                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3935                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3936                             dc->cc_op = CC_OP_ADD;
3937                         } else {
3938                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3939                         }
3940                         break;
3941                     case 0x1: /* and */
3942                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3943                         if (xop & 0x10) {
3944                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3945                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3946                             dc->cc_op = CC_OP_LOGIC;
3947                         }
3948                         break;
3949                     case 0x2: /* or */
3950                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3951                         if (xop & 0x10) {
3952                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3953                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3954                             dc->cc_op = CC_OP_LOGIC;
3955                         }
3956                         break;
3957                     case 0x3: /* xor */
3958                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3959                         if (xop & 0x10) {
3960                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3961                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3962                             dc->cc_op = CC_OP_LOGIC;
3963                         }
3964                         break;
3965                     case 0x4: /* sub */
3966                         if (xop & 0x10) {
3967                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3968                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3969                             dc->cc_op = CC_OP_SUB;
3970                         } else {
3971                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3972                         }
3973                         break;
3974                     case 0x5: /* andn */
3975                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3976                         if (xop & 0x10) {
3977                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3978                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3979                             dc->cc_op = CC_OP_LOGIC;
3980                         }
3981                         break;
3982                     case 0x6: /* orn */
3983                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3984                         if (xop & 0x10) {
3985                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3986                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3987                             dc->cc_op = CC_OP_LOGIC;
3988                         }
3989                         break;
3990                     case 0x7: /* xorn */
3991                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3992                         if (xop & 0x10) {
3993                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3994                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3995                             dc->cc_op = CC_OP_LOGIC;
3996                         }
3997                         break;
3998                     case 0x8: /* addx, V9 addc */
3999                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4000                                         (xop & 0x10));
4001                         break;
4002 #ifdef TARGET_SPARC64
4003                     case 0x9: /* V9 mulx */
4004                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4005                         break;
4006 #endif
4007                     case 0xa: /* umul */
4008                         CHECK_IU_FEATURE(dc, MUL);
4009                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4010                         if (xop & 0x10) {
4011                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4012                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4013                             dc->cc_op = CC_OP_LOGIC;
4014                         }
4015                         break;
4016                     case 0xb: /* smul */
4017                         CHECK_IU_FEATURE(dc, MUL);
4018                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4019                         if (xop & 0x10) {
4020                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4021                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4022                             dc->cc_op = CC_OP_LOGIC;
4023                         }
4024                         break;
4025                     case 0xc: /* subx, V9 subc */
4026                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4027                                         (xop & 0x10));
4028                         break;
4029 #ifdef TARGET_SPARC64
4030                     case 0xd: /* V9 udivx */
4031                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
4032                         break;
4033 #endif
4034                     case 0xe: /* udiv */
4035                         CHECK_IU_FEATURE(dc, DIV);
4036                         if (xop & 0x10) {
4037                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
4038                                                cpu_src2);
4039                             dc->cc_op = CC_OP_DIV;
4040                         } else {
4041                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
4042                                             cpu_src2);
4043                         }
4044                         break;
4045                     case 0xf: /* sdiv */
4046                         CHECK_IU_FEATURE(dc, DIV);
4047                         if (xop & 0x10) {
4048                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
4049                                                cpu_src2);
4050                             dc->cc_op = CC_OP_DIV;
4051                         } else {
4052                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
4053                                             cpu_src2);
4054                         }
4055                         break;
4056                     default:
4057                         goto illegal_insn;
4058                     }
4059                     gen_store_gpr(dc, rd, cpu_dst);
4060                 } else {
4061                     cpu_src1 = get_src1(dc, insn);
4062                     cpu_src2 = get_src2(dc, insn);
4063                     switch (xop) {
4064                     case 0x20: /* taddcc */
4065                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4066                         gen_store_gpr(dc, rd, cpu_dst);
4067                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4068                         dc->cc_op = CC_OP_TADD;
4069                         break;
4070                     case 0x21: /* tsubcc */
4071                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4072                         gen_store_gpr(dc, rd, cpu_dst);
4073                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4074                         dc->cc_op = CC_OP_TSUB;
4075                         break;
4076                     case 0x22: /* taddcctv */
4077                         gen_helper_taddcctv(cpu_dst, tcg_env,
4078                                             cpu_src1, cpu_src2);
4079                         gen_store_gpr(dc, rd, cpu_dst);
4080                         dc->cc_op = CC_OP_TADDTV;
4081                         break;
4082                     case 0x23: /* tsubcctv */
4083                         gen_helper_tsubcctv(cpu_dst, tcg_env,
4084                                             cpu_src1, cpu_src2);
4085                         gen_store_gpr(dc, rd, cpu_dst);
4086                         dc->cc_op = CC_OP_TSUBTV;
4087                         break;
4088                     case 0x24: /* mulscc */
4089                         update_psr(dc);
4090                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4091                         gen_store_gpr(dc, rd, cpu_dst);
4092                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4093                         dc->cc_op = CC_OP_ADD;
4094                         break;
4095 #ifndef TARGET_SPARC64
4096                     case 0x25:  /* sll */
4097                         if (IS_IMM) { /* immediate */
4098                             simm = GET_FIELDs(insn, 20, 31);
4099                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4100                         } else { /* register */
4101                             cpu_tmp0 = tcg_temp_new();
4102                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4103                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4104                         }
4105                         gen_store_gpr(dc, rd, cpu_dst);
4106                         break;
4107                     case 0x26:  /* srl */
4108                         if (IS_IMM) { /* immediate */
4109                             simm = GET_FIELDs(insn, 20, 31);
4110                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4111                         } else { /* register */
4112                             cpu_tmp0 = tcg_temp_new();
4113                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4114                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4115                         }
4116                         gen_store_gpr(dc, rd, cpu_dst);
4117                         break;
4118                     case 0x27:  /* sra */
4119                         if (IS_IMM) { /* immediate */
4120                             simm = GET_FIELDs(insn, 20, 31);
4121                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4122                         } else { /* register */
4123                             cpu_tmp0 = tcg_temp_new();
4124                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4125                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4126                         }
4127                         gen_store_gpr(dc, rd, cpu_dst);
4128                         break;
4129 #endif
4130                     case 0x30:
4131                         {
4132                             cpu_tmp0 = tcg_temp_new();
4133                             switch(rd) {
4134                             case 0: /* wry */
4135                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4136                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4137                                 break;
4138 #ifndef TARGET_SPARC64
4139                             case 0x01 ... 0x0f: /* undefined in the
4140                                                    SPARCv8 manual, nop
4141                                                    on the microSPARC
4142                                                    II */
4143                             case 0x10 ... 0x1f: /* implementation-dependent
4144                                                    in the SPARCv8
4145                                                    manual, nop on the
4146                                                    microSPARC II */
4147                                 if ((rd == 0x13) && (dc->def->features &
4148                                                      CPU_FEATURE_POWERDOWN)) {
4149                                     /* LEON3 power-down */
4150                                     save_state(dc);
4151                                     gen_helper_power_down(tcg_env);
4152                                 }
4153                                 break;
4154 #else
4155                             case 0x2: /* V9 wrccr */
4156                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4157                                 gen_helper_wrccr(tcg_env, cpu_tmp0);
4158                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4159                                 dc->cc_op = CC_OP_FLAGS;
4160                                 break;
4161                             case 0x3: /* V9 wrasi */
4162                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4163                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4164                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4165                                                 offsetof(CPUSPARCState, asi));
4166                                 /*
4167                                  * End TB to notice changed ASI.
4168                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4169                                  * update DisasContext and not exit the TB.
4170                                  */
4171                                 save_state(dc);
4172                                 gen_op_next_insn();
4173                                 tcg_gen_lookup_and_goto_ptr();
4174                                 dc->base.is_jmp = DISAS_NORETURN;
4175                                 break;
4176                             case 0x6: /* V9 wrfprs */
4177                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4178                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4179                                 dc->fprs_dirty = 0;
4180                                 save_state(dc);
4181                                 gen_op_next_insn();
4182                                 tcg_gen_exit_tb(NULL, 0);
4183                                 dc->base.is_jmp = DISAS_NORETURN;
4184                                 break;
4185                             case 0xf: /* V9 sir, nop if user */
4186 #if !defined(CONFIG_USER_ONLY)
4187                                 if (supervisor(dc)) {
4188                                     ; // XXX
4189                                 }
4190 #endif
4191                                 break;
4192                             case 0x13: /* Graphics Status */
4193                                 if (gen_trap_ifnofpu(dc)) {
4194                                     goto jmp_insn;
4195                                 }
4196                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4197                                 break;
4198                             case 0x14: /* Softint set */
4199                                 if (!supervisor(dc))
4200                                     goto illegal_insn;
4201                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4202                                 gen_helper_set_softint(tcg_env, cpu_tmp0);
4203                                 break;
4204                             case 0x15: /* Softint clear */
4205                                 if (!supervisor(dc))
4206                                     goto illegal_insn;
4207                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4208                                 gen_helper_clear_softint(tcg_env, cpu_tmp0);
4209                                 break;
4210                             case 0x16: /* Softint write */
4211                                 if (!supervisor(dc))
4212                                     goto illegal_insn;
4213                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4214                                 gen_helper_write_softint(tcg_env, cpu_tmp0);
4215                                 break;
4216                             case 0x17: /* Tick compare */
4217 #if !defined(CONFIG_USER_ONLY)
4218                                 if (!supervisor(dc))
4219                                     goto illegal_insn;
4220 #endif
4221                                 {
4222                                     TCGv_ptr r_tickptr;
4223 
4224                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4225                                                    cpu_src2);
4226                                     r_tickptr = tcg_temp_new_ptr();
4227                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4228                                                    offsetof(CPUSPARCState, tick));
4229                                     translator_io_start(&dc->base);
4230                                     gen_helper_tick_set_limit(r_tickptr,
4231                                                               cpu_tick_cmpr);
4232                                     /* End TB to handle timer interrupt */
4233                                     dc->base.is_jmp = DISAS_EXIT;
4234                                 }
4235                                 break;
4236                             case 0x18: /* System tick */
4237 #if !defined(CONFIG_USER_ONLY)
4238                                 if (!supervisor(dc))
4239                                     goto illegal_insn;
4240 #endif
4241                                 {
4242                                     TCGv_ptr r_tickptr;
4243 
4244                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4245                                                    cpu_src2);
4246                                     r_tickptr = tcg_temp_new_ptr();
4247                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4248                                                    offsetof(CPUSPARCState, stick));
4249                                     translator_io_start(&dc->base);
4250                                     gen_helper_tick_set_count(r_tickptr,
4251                                                               cpu_tmp0);
4252                                     /* End TB to handle timer interrupt */
4253                                     dc->base.is_jmp = DISAS_EXIT;
4254                                 }
4255                                 break;
4256                             case 0x19: /* System tick compare */
4257 #if !defined(CONFIG_USER_ONLY)
4258                                 if (!supervisor(dc))
4259                                     goto illegal_insn;
4260 #endif
4261                                 {
4262                                     TCGv_ptr r_tickptr;
4263 
4264                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4265                                                    cpu_src2);
4266                                     r_tickptr = tcg_temp_new_ptr();
4267                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4268                                                    offsetof(CPUSPARCState, stick));
4269                                     translator_io_start(&dc->base);
4270                                     gen_helper_tick_set_limit(r_tickptr,
4271                                                               cpu_stick_cmpr);
4272                                     /* End TB to handle timer interrupt */
4273                                     dc->base.is_jmp = DISAS_EXIT;
4274                                 }
4275                                 break;
4276 
4277                             case 0x10: /* Performance Control */
4278                             case 0x11: /* Performance Instrumentation
4279                                           Counter */
4280                             case 0x12: /* Dispatch Control */
4281 #endif
4282                             default:
4283                                 goto illegal_insn;
4284                             }
4285                         }
4286                         break;
4287 #if !defined(CONFIG_USER_ONLY)
4288                     case 0x31: /* wrpsr, V9 saved, restored */
4289                         {
4290                             if (!supervisor(dc))
4291                                 goto priv_insn;
4292 #ifdef TARGET_SPARC64
4293                             switch (rd) {
4294                             case 0:
4295                                 gen_helper_saved(tcg_env);
4296                                 break;
4297                             case 1:
4298                                 gen_helper_restored(tcg_env);
4299                                 break;
4300                             case 2: /* UA2005 allclean */
4301                             case 3: /* UA2005 otherw */
4302                             case 4: /* UA2005 normalw */
4303                             case 5: /* UA2005 invalw */
4304                                 // XXX
4305                             default:
4306                                 goto illegal_insn;
4307                             }
4308 #else
4309                             cpu_tmp0 = tcg_temp_new();
4310                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4311                             gen_helper_wrpsr(tcg_env, cpu_tmp0);
4312                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4313                             dc->cc_op = CC_OP_FLAGS;
4314                             save_state(dc);
4315                             gen_op_next_insn();
4316                             tcg_gen_exit_tb(NULL, 0);
4317                             dc->base.is_jmp = DISAS_NORETURN;
4318 #endif
4319                         }
4320                         break;
4321                     case 0x32: /* wrwim, V9 wrpr */
4322                         {
4323                             if (!supervisor(dc))
4324                                 goto priv_insn;
4325                             cpu_tmp0 = tcg_temp_new();
4326                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4327 #ifdef TARGET_SPARC64
4328                             switch (rd) {
4329                             case 0: // tpc
4330                                 {
4331                                     TCGv_ptr r_tsptr;
4332 
4333                                     r_tsptr = tcg_temp_new_ptr();
4334                                     gen_load_trap_state_at_tl(r_tsptr, tcg_env);
4335                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4336                                                   offsetof(trap_state, tpc));
4337                                 }
4338                                 break;
4339                             case 1: // tnpc
4340                                 {
4341                                     TCGv_ptr r_tsptr;
4342 
4343                                     r_tsptr = tcg_temp_new_ptr();
4344                                     gen_load_trap_state_at_tl(r_tsptr, tcg_env);
4345                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4346                                                   offsetof(trap_state, tnpc));
4347                                 }
4348                                 break;
4349                             case 2: // tstate
4350                                 {
4351                                     TCGv_ptr r_tsptr;
4352 
4353                                     r_tsptr = tcg_temp_new_ptr();
4354                                     gen_load_trap_state_at_tl(r_tsptr, tcg_env);
4355                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4356                                                   offsetof(trap_state,
4357                                                            tstate));
4358                                 }
4359                                 break;
4360                             case 3: // tt
4361                                 {
4362                                     TCGv_ptr r_tsptr;
4363 
4364                                     r_tsptr = tcg_temp_new_ptr();
4365                                     gen_load_trap_state_at_tl(r_tsptr, tcg_env);
4366                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4367                                                     offsetof(trap_state, tt));
4368                                 }
4369                                 break;
4370                             case 4: // tick
4371                                 {
4372                                     TCGv_ptr r_tickptr;
4373 
4374                                     r_tickptr = tcg_temp_new_ptr();
4375                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4376                                                    offsetof(CPUSPARCState, tick));
4377                                     translator_io_start(&dc->base);
4378                                     gen_helper_tick_set_count(r_tickptr,
4379                                                               cpu_tmp0);
4380                                     /* End TB to handle timer interrupt */
4381                                     dc->base.is_jmp = DISAS_EXIT;
4382                                 }
4383                                 break;
4384                             case 5: // tba
4385                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4386                                 break;
4387                             case 6: // pstate
4388                                 save_state(dc);
4389                                 if (translator_io_start(&dc->base)) {
4390                                     dc->base.is_jmp = DISAS_EXIT;
4391                                 }
4392                                 gen_helper_wrpstate(tcg_env, cpu_tmp0);
4393                                 dc->npc = DYNAMIC_PC;
4394                                 break;
4395                             case 7: // tl
4396                                 save_state(dc);
4397                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4398                                                offsetof(CPUSPARCState, tl));
4399                                 dc->npc = DYNAMIC_PC;
4400                                 break;
4401                             case 8: // pil
4402                                 if (translator_io_start(&dc->base)) {
4403                                     dc->base.is_jmp = DISAS_EXIT;
4404                                 }
4405                                 gen_helper_wrpil(tcg_env, cpu_tmp0);
4406                                 break;
4407                             case 9: // cwp
4408                                 gen_helper_wrcwp(tcg_env, cpu_tmp0);
4409                                 break;
4410                             case 10: // cansave
4411                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4412                                                 offsetof(CPUSPARCState,
4413                                                          cansave));
4414                                 break;
4415                             case 11: // canrestore
4416                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4417                                                 offsetof(CPUSPARCState,
4418                                                          canrestore));
4419                                 break;
4420                             case 12: // cleanwin
4421                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4422                                                 offsetof(CPUSPARCState,
4423                                                          cleanwin));
4424                                 break;
4425                             case 13: // otherwin
4426                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4427                                                 offsetof(CPUSPARCState,
4428                                                          otherwin));
4429                                 break;
4430                             case 14: // wstate
4431                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4432                                                 offsetof(CPUSPARCState,
4433                                                          wstate));
4434                                 break;
4435                             case 16: // UA2005 gl
4436                                 CHECK_IU_FEATURE(dc, GL);
4437                                 gen_helper_wrgl(tcg_env, cpu_tmp0);
4438                                 break;
4439                             case 26: // UA2005 strand status
4440                                 CHECK_IU_FEATURE(dc, HYPV);
4441                                 if (!hypervisor(dc))
4442                                     goto priv_insn;
4443                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4444                                 break;
4445                             default:
4446                                 goto illegal_insn;
4447                             }
4448 #else
4449                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4450                             if (dc->def->nwindows != 32) {
4451                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4452                                                 (1 << dc->def->nwindows) - 1);
4453                             }
4454 #endif
4455                         }
4456                         break;
4457                     case 0x33: /* wrtbr, UA2005 wrhpr */
4458                         {
4459 #ifndef TARGET_SPARC64
4460                             if (!supervisor(dc))
4461                                 goto priv_insn;
4462                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4463 #else
4464                             CHECK_IU_FEATURE(dc, HYPV);
4465                             if (!hypervisor(dc))
4466                                 goto priv_insn;
4467                             cpu_tmp0 = tcg_temp_new();
4468                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4469                             switch (rd) {
4470                             case 0: // hpstate
4471                                 tcg_gen_st_i64(cpu_tmp0, tcg_env,
4472                                                offsetof(CPUSPARCState,
4473                                                         hpstate));
4474                                 save_state(dc);
4475                                 gen_op_next_insn();
4476                                 tcg_gen_exit_tb(NULL, 0);
4477                                 dc->base.is_jmp = DISAS_NORETURN;
4478                                 break;
4479                             case 1: // htstate
4480                                 // XXX gen_op_wrhtstate();
4481                                 break;
4482                             case 3: // hintp
4483                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4484                                 break;
4485                             case 5: // htba
4486                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4487                                 break;
4488                             case 31: // hstick_cmpr
4489                                 {
4490                                     TCGv_ptr r_tickptr;
4491 
4492                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4493                                     r_tickptr = tcg_temp_new_ptr();
4494                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4495                                                    offsetof(CPUSPARCState, hstick));
4496                                     translator_io_start(&dc->base);
4497                                     gen_helper_tick_set_limit(r_tickptr,
4498                                                               cpu_hstick_cmpr);
4499                                     /* End TB to handle timer interrupt */
4500                                     dc->base.is_jmp = DISAS_EXIT;
4501                                 }
4502                                 break;
4503                             case 6: // hver readonly
4504                             default:
4505                                 goto illegal_insn;
4506                             }
4507 #endif
4508                         }
4509                         break;
4510 #endif
4511 #ifdef TARGET_SPARC64
4512                     case 0x2c: /* V9 movcc */
4513                         {
4514                             int cc = GET_FIELD_SP(insn, 11, 12);
4515                             int cond = GET_FIELD_SP(insn, 14, 17);
4516                             DisasCompare cmp;
4517                             TCGv dst;
4518 
4519                             if (insn & (1 << 18)) {
4520                                 if (cc == 0) {
4521                                     gen_compare(&cmp, 0, cond, dc);
4522                                 } else if (cc == 2) {
4523                                     gen_compare(&cmp, 1, cond, dc);
4524                                 } else {
4525                                     goto illegal_insn;
4526                                 }
4527                             } else {
4528                                 gen_fcompare(&cmp, cc, cond);
4529                             }
4530 
4531                             /* The get_src2 above loaded the normal 13-bit
4532                                immediate field, not the 11-bit field we have
4533                                in movcc.  But it did handle the reg case.  */
4534                             if (IS_IMM) {
4535                                 simm = GET_FIELD_SPs(insn, 0, 10);
4536                                 tcg_gen_movi_tl(cpu_src2, simm);
4537                             }
4538 
4539                             dst = gen_load_gpr(dc, rd);
4540                             tcg_gen_movcond_tl(cmp.cond, dst,
4541                                                cmp.c1, cmp.c2,
4542                                                cpu_src2, dst);
4543                             gen_store_gpr(dc, rd, dst);
4544                             break;
4545                         }
4546                     case 0x2d: /* V9 sdivx */
4547                         gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
4548                         gen_store_gpr(dc, rd, cpu_dst);
4549                         break;
4550                     case 0x2e: /* V9 popc */
4551                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4552                         gen_store_gpr(dc, rd, cpu_dst);
4553                         break;
4554                     case 0x2f: /* V9 movr */
4555                         {
4556                             int cond = GET_FIELD_SP(insn, 10, 12);
4557                             DisasCompare cmp;
4558                             TCGv dst;
4559 
4560                             gen_compare_reg(&cmp, cond, cpu_src1);
4561 
4562                             /* The get_src2 above loaded the normal 13-bit
4563                                immediate field, not the 10-bit field we have
4564                                in movr.  But it did handle the reg case.  */
4565                             if (IS_IMM) {
4566                                 simm = GET_FIELD_SPs(insn, 0, 9);
4567                                 tcg_gen_movi_tl(cpu_src2, simm);
4568                             }
4569 
4570                             dst = gen_load_gpr(dc, rd);
4571                             tcg_gen_movcond_tl(cmp.cond, dst,
4572                                                cmp.c1, cmp.c2,
4573                                                cpu_src2, dst);
4574                             gen_store_gpr(dc, rd, dst);
4575                             break;
4576                         }
4577 #endif
4578                     default:
4579                         goto illegal_insn;
4580                     }
4581                 }
4582             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4583 #ifdef TARGET_SPARC64
4584                 int opf = GET_FIELD_SP(insn, 5, 13);
4585                 rs1 = GET_FIELD(insn, 13, 17);
4586                 rs2 = GET_FIELD(insn, 27, 31);
4587                 if (gen_trap_ifnofpu(dc)) {
4588                     goto jmp_insn;
4589                 }
4590 
4591                 switch (opf) {
4592                 case 0x000: /* VIS I edge8cc */
4593                     CHECK_FPU_FEATURE(dc, VIS1);
4594                     cpu_src1 = gen_load_gpr(dc, rs1);
4595                     cpu_src2 = gen_load_gpr(dc, rs2);
4596                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4597                     gen_store_gpr(dc, rd, cpu_dst);
4598                     break;
4599                 case 0x001: /* VIS II edge8n */
4600                     CHECK_FPU_FEATURE(dc, VIS2);
4601                     cpu_src1 = gen_load_gpr(dc, rs1);
4602                     cpu_src2 = gen_load_gpr(dc, rs2);
4603                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4604                     gen_store_gpr(dc, rd, cpu_dst);
4605                     break;
4606                 case 0x002: /* VIS I edge8lcc */
4607                     CHECK_FPU_FEATURE(dc, VIS1);
4608                     cpu_src1 = gen_load_gpr(dc, rs1);
4609                     cpu_src2 = gen_load_gpr(dc, rs2);
4610                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4611                     gen_store_gpr(dc, rd, cpu_dst);
4612                     break;
4613                 case 0x003: /* VIS II edge8ln */
4614                     CHECK_FPU_FEATURE(dc, VIS2);
4615                     cpu_src1 = gen_load_gpr(dc, rs1);
4616                     cpu_src2 = gen_load_gpr(dc, rs2);
4617                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4618                     gen_store_gpr(dc, rd, cpu_dst);
4619                     break;
4620                 case 0x004: /* VIS I edge16cc */
4621                     CHECK_FPU_FEATURE(dc, VIS1);
4622                     cpu_src1 = gen_load_gpr(dc, rs1);
4623                     cpu_src2 = gen_load_gpr(dc, rs2);
4624                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4625                     gen_store_gpr(dc, rd, cpu_dst);
4626                     break;
4627                 case 0x005: /* VIS II edge16n */
4628                     CHECK_FPU_FEATURE(dc, VIS2);
4629                     cpu_src1 = gen_load_gpr(dc, rs1);
4630                     cpu_src2 = gen_load_gpr(dc, rs2);
4631                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4632                     gen_store_gpr(dc, rd, cpu_dst);
4633                     break;
4634                 case 0x006: /* VIS I edge16lcc */
4635                     CHECK_FPU_FEATURE(dc, VIS1);
4636                     cpu_src1 = gen_load_gpr(dc, rs1);
4637                     cpu_src2 = gen_load_gpr(dc, rs2);
4638                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4639                     gen_store_gpr(dc, rd, cpu_dst);
4640                     break;
4641                 case 0x007: /* VIS II edge16ln */
4642                     CHECK_FPU_FEATURE(dc, VIS2);
4643                     cpu_src1 = gen_load_gpr(dc, rs1);
4644                     cpu_src2 = gen_load_gpr(dc, rs2);
4645                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4646                     gen_store_gpr(dc, rd, cpu_dst);
4647                     break;
4648                 case 0x008: /* VIS I edge32cc */
4649                     CHECK_FPU_FEATURE(dc, VIS1);
4650                     cpu_src1 = gen_load_gpr(dc, rs1);
4651                     cpu_src2 = gen_load_gpr(dc, rs2);
4652                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4653                     gen_store_gpr(dc, rd, cpu_dst);
4654                     break;
4655                 case 0x009: /* VIS II edge32n */
4656                     CHECK_FPU_FEATURE(dc, VIS2);
4657                     cpu_src1 = gen_load_gpr(dc, rs1);
4658                     cpu_src2 = gen_load_gpr(dc, rs2);
4659                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4660                     gen_store_gpr(dc, rd, cpu_dst);
4661                     break;
4662                 case 0x00a: /* VIS I edge32lcc */
4663                     CHECK_FPU_FEATURE(dc, VIS1);
4664                     cpu_src1 = gen_load_gpr(dc, rs1);
4665                     cpu_src2 = gen_load_gpr(dc, rs2);
4666                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4667                     gen_store_gpr(dc, rd, cpu_dst);
4668                     break;
4669                 case 0x00b: /* VIS II edge32ln */
4670                     CHECK_FPU_FEATURE(dc, VIS2);
4671                     cpu_src1 = gen_load_gpr(dc, rs1);
4672                     cpu_src2 = gen_load_gpr(dc, rs2);
4673                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4674                     gen_store_gpr(dc, rd, cpu_dst);
4675                     break;
4676                 case 0x010: /* VIS I array8 */
4677                     CHECK_FPU_FEATURE(dc, VIS1);
4678                     cpu_src1 = gen_load_gpr(dc, rs1);
4679                     cpu_src2 = gen_load_gpr(dc, rs2);
4680                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4681                     gen_store_gpr(dc, rd, cpu_dst);
4682                     break;
4683                 case 0x012: /* VIS I array16 */
4684                     CHECK_FPU_FEATURE(dc, VIS1);
4685                     cpu_src1 = gen_load_gpr(dc, rs1);
4686                     cpu_src2 = gen_load_gpr(dc, rs2);
4687                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4688                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4689                     gen_store_gpr(dc, rd, cpu_dst);
4690                     break;
4691                 case 0x014: /* VIS I array32 */
4692                     CHECK_FPU_FEATURE(dc, VIS1);
4693                     cpu_src1 = gen_load_gpr(dc, rs1);
4694                     cpu_src2 = gen_load_gpr(dc, rs2);
4695                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4696                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4697                     gen_store_gpr(dc, rd, cpu_dst);
4698                     break;
4699                 case 0x018: /* VIS I alignaddr */
4700                     CHECK_FPU_FEATURE(dc, VIS1);
4701                     cpu_src1 = gen_load_gpr(dc, rs1);
4702                     cpu_src2 = gen_load_gpr(dc, rs2);
4703                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4704                     gen_store_gpr(dc, rd, cpu_dst);
4705                     break;
4706                 case 0x01a: /* VIS I alignaddrl */
4707                     CHECK_FPU_FEATURE(dc, VIS1);
4708                     cpu_src1 = gen_load_gpr(dc, rs1);
4709                     cpu_src2 = gen_load_gpr(dc, rs2);
4710                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4711                     gen_store_gpr(dc, rd, cpu_dst);
4712                     break;
4713                 case 0x019: /* VIS II bmask */
4714                     CHECK_FPU_FEATURE(dc, VIS2);
4715                     cpu_src1 = gen_load_gpr(dc, rs1);
4716                     cpu_src2 = gen_load_gpr(dc, rs2);
4717                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4718                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4719                     gen_store_gpr(dc, rd, cpu_dst);
4720                     break;
4721                 case 0x020: /* VIS I fcmple16 */
4722                     CHECK_FPU_FEATURE(dc, VIS1);
4723                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4724                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4725                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4726                     gen_store_gpr(dc, rd, cpu_dst);
4727                     break;
4728                 case 0x022: /* VIS I fcmpne16 */
4729                     CHECK_FPU_FEATURE(dc, VIS1);
4730                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4731                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4732                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4733                     gen_store_gpr(dc, rd, cpu_dst);
4734                     break;
4735                 case 0x024: /* VIS I fcmple32 */
4736                     CHECK_FPU_FEATURE(dc, VIS1);
4737                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4738                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4739                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4740                     gen_store_gpr(dc, rd, cpu_dst);
4741                     break;
4742                 case 0x026: /* VIS I fcmpne32 */
4743                     CHECK_FPU_FEATURE(dc, VIS1);
4744                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4745                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4746                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4747                     gen_store_gpr(dc, rd, cpu_dst);
4748                     break;
4749                 case 0x028: /* VIS I fcmpgt16 */
4750                     CHECK_FPU_FEATURE(dc, VIS1);
4751                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4752                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4753                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4754                     gen_store_gpr(dc, rd, cpu_dst);
4755                     break;
4756                 case 0x02a: /* VIS I fcmpeq16 */
4757                     CHECK_FPU_FEATURE(dc, VIS1);
4758                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4759                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4760                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4761                     gen_store_gpr(dc, rd, cpu_dst);
4762                     break;
4763                 case 0x02c: /* VIS I fcmpgt32 */
4764                     CHECK_FPU_FEATURE(dc, VIS1);
4765                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4766                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4767                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4768                     gen_store_gpr(dc, rd, cpu_dst);
4769                     break;
4770                 case 0x02e: /* VIS I fcmpeq32 */
4771                     CHECK_FPU_FEATURE(dc, VIS1);
4772                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4773                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4774                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4775                     gen_store_gpr(dc, rd, cpu_dst);
4776                     break;
4777                 case 0x031: /* VIS I fmul8x16 */
4778                     CHECK_FPU_FEATURE(dc, VIS1);
4779                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4780                     break;
4781                 case 0x033: /* VIS I fmul8x16au */
4782                     CHECK_FPU_FEATURE(dc, VIS1);
4783                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4784                     break;
4785                 case 0x035: /* VIS I fmul8x16al */
4786                     CHECK_FPU_FEATURE(dc, VIS1);
4787                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4788                     break;
4789                 case 0x036: /* VIS I fmul8sux16 */
4790                     CHECK_FPU_FEATURE(dc, VIS1);
4791                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4792                     break;
4793                 case 0x037: /* VIS I fmul8ulx16 */
4794                     CHECK_FPU_FEATURE(dc, VIS1);
4795                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4796                     break;
4797                 case 0x038: /* VIS I fmuld8sux16 */
4798                     CHECK_FPU_FEATURE(dc, VIS1);
4799                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4800                     break;
4801                 case 0x039: /* VIS I fmuld8ulx16 */
4802                     CHECK_FPU_FEATURE(dc, VIS1);
4803                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4804                     break;
4805                 case 0x03a: /* VIS I fpack32 */
4806                     CHECK_FPU_FEATURE(dc, VIS1);
4807                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4808                     break;
4809                 case 0x03b: /* VIS I fpack16 */
4810                     CHECK_FPU_FEATURE(dc, VIS1);
4811                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4812                     cpu_dst_32 = gen_dest_fpr_F(dc);
4813                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4814                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4815                     break;
4816                 case 0x03d: /* VIS I fpackfix */
4817                     CHECK_FPU_FEATURE(dc, VIS1);
4818                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4819                     cpu_dst_32 = gen_dest_fpr_F(dc);
4820                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4821                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4822                     break;
4823                 case 0x03e: /* VIS I pdist */
4824                     CHECK_FPU_FEATURE(dc, VIS1);
4825                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4826                     break;
4827                 case 0x048: /* VIS I faligndata */
4828                     CHECK_FPU_FEATURE(dc, VIS1);
4829                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4830                     break;
4831                 case 0x04b: /* VIS I fpmerge */
4832                     CHECK_FPU_FEATURE(dc, VIS1);
4833                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4834                     break;
4835                 case 0x04c: /* VIS II bshuffle */
4836                     CHECK_FPU_FEATURE(dc, VIS2);
4837                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4838                     break;
4839                 case 0x04d: /* VIS I fexpand */
4840                     CHECK_FPU_FEATURE(dc, VIS1);
4841                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4842                     break;
4843                 case 0x050: /* VIS I fpadd16 */
4844                     CHECK_FPU_FEATURE(dc, VIS1);
4845                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4846                     break;
4847                 case 0x051: /* VIS I fpadd16s */
4848                     CHECK_FPU_FEATURE(dc, VIS1);
4849                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4850                     break;
4851                 case 0x052: /* VIS I fpadd32 */
4852                     CHECK_FPU_FEATURE(dc, VIS1);
4853                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4854                     break;
4855                 case 0x053: /* VIS I fpadd32s */
4856                     CHECK_FPU_FEATURE(dc, VIS1);
4857                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4858                     break;
4859                 case 0x054: /* VIS I fpsub16 */
4860                     CHECK_FPU_FEATURE(dc, VIS1);
4861                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4862                     break;
4863                 case 0x055: /* VIS I fpsub16s */
4864                     CHECK_FPU_FEATURE(dc, VIS1);
4865                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4866                     break;
4867                 case 0x056: /* VIS I fpsub32 */
4868                     CHECK_FPU_FEATURE(dc, VIS1);
4869                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4870                     break;
4871                 case 0x057: /* VIS I fpsub32s */
4872                     CHECK_FPU_FEATURE(dc, VIS1);
4873                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4874                     break;
4875                 case 0x060: /* VIS I fzero */
4876                     CHECK_FPU_FEATURE(dc, VIS1);
4877                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4878                     tcg_gen_movi_i64(cpu_dst_64, 0);
4879                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4880                     break;
4881                 case 0x061: /* VIS I fzeros */
4882                     CHECK_FPU_FEATURE(dc, VIS1);
4883                     cpu_dst_32 = gen_dest_fpr_F(dc);
4884                     tcg_gen_movi_i32(cpu_dst_32, 0);
4885                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4886                     break;
4887                 case 0x062: /* VIS I fnor */
4888                     CHECK_FPU_FEATURE(dc, VIS1);
4889                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4890                     break;
4891                 case 0x063: /* VIS I fnors */
4892                     CHECK_FPU_FEATURE(dc, VIS1);
4893                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4894                     break;
4895                 case 0x064: /* VIS I fandnot2 */
4896                     CHECK_FPU_FEATURE(dc, VIS1);
4897                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4898                     break;
4899                 case 0x065: /* VIS I fandnot2s */
4900                     CHECK_FPU_FEATURE(dc, VIS1);
4901                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4902                     break;
4903                 case 0x066: /* VIS I fnot2 */
4904                     CHECK_FPU_FEATURE(dc, VIS1);
4905                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4906                     break;
4907                 case 0x067: /* VIS I fnot2s */
4908                     CHECK_FPU_FEATURE(dc, VIS1);
4909                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4910                     break;
4911                 case 0x068: /* VIS I fandnot1 */
4912                     CHECK_FPU_FEATURE(dc, VIS1);
4913                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4914                     break;
4915                 case 0x069: /* VIS I fandnot1s */
4916                     CHECK_FPU_FEATURE(dc, VIS1);
4917                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4918                     break;
4919                 case 0x06a: /* VIS I fnot1 */
4920                     CHECK_FPU_FEATURE(dc, VIS1);
4921                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4922                     break;
4923                 case 0x06b: /* VIS I fnot1s */
4924                     CHECK_FPU_FEATURE(dc, VIS1);
4925                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4926                     break;
4927                 case 0x06c: /* VIS I fxor */
4928                     CHECK_FPU_FEATURE(dc, VIS1);
4929                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4930                     break;
4931                 case 0x06d: /* VIS I fxors */
4932                     CHECK_FPU_FEATURE(dc, VIS1);
4933                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4934                     break;
4935                 case 0x06e: /* VIS I fnand */
4936                     CHECK_FPU_FEATURE(dc, VIS1);
4937                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4938                     break;
4939                 case 0x06f: /* VIS I fnands */
4940                     CHECK_FPU_FEATURE(dc, VIS1);
4941                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4942                     break;
4943                 case 0x070: /* VIS I fand */
4944                     CHECK_FPU_FEATURE(dc, VIS1);
4945                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4946                     break;
4947                 case 0x071: /* VIS I fands */
4948                     CHECK_FPU_FEATURE(dc, VIS1);
4949                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4950                     break;
4951                 case 0x072: /* VIS I fxnor */
4952                     CHECK_FPU_FEATURE(dc, VIS1);
4953                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4954                     break;
4955                 case 0x073: /* VIS I fxnors */
4956                     CHECK_FPU_FEATURE(dc, VIS1);
4957                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4958                     break;
4959                 case 0x074: /* VIS I fsrc1 */
4960                     CHECK_FPU_FEATURE(dc, VIS1);
4961                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4962                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4963                     break;
4964                 case 0x075: /* VIS I fsrc1s */
4965                     CHECK_FPU_FEATURE(dc, VIS1);
4966                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4967                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4968                     break;
4969                 case 0x076: /* VIS I fornot2 */
4970                     CHECK_FPU_FEATURE(dc, VIS1);
4971                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4972                     break;
4973                 case 0x077: /* VIS I fornot2s */
4974                     CHECK_FPU_FEATURE(dc, VIS1);
4975                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4976                     break;
4977                 case 0x078: /* VIS I fsrc2 */
4978                     CHECK_FPU_FEATURE(dc, VIS1);
4979                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4980                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4981                     break;
4982                 case 0x079: /* VIS I fsrc2s */
4983                     CHECK_FPU_FEATURE(dc, VIS1);
4984                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4985                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4986                     break;
4987                 case 0x07a: /* VIS I fornot1 */
4988                     CHECK_FPU_FEATURE(dc, VIS1);
4989                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4990                     break;
4991                 case 0x07b: /* VIS I fornot1s */
4992                     CHECK_FPU_FEATURE(dc, VIS1);
4993                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4994                     break;
4995                 case 0x07c: /* VIS I for */
4996                     CHECK_FPU_FEATURE(dc, VIS1);
4997                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4998                     break;
4999                 case 0x07d: /* VIS I fors */
5000                     CHECK_FPU_FEATURE(dc, VIS1);
5001                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5002                     break;
5003                 case 0x07e: /* VIS I fone */
5004                     CHECK_FPU_FEATURE(dc, VIS1);
5005                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5006                     tcg_gen_movi_i64(cpu_dst_64, -1);
5007                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5008                     break;
5009                 case 0x07f: /* VIS I fones */
5010                     CHECK_FPU_FEATURE(dc, VIS1);
5011                     cpu_dst_32 = gen_dest_fpr_F(dc);
5012                     tcg_gen_movi_i32(cpu_dst_32, -1);
5013                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5014                     break;
5015                 case 0x080: /* VIS I shutdown */
5016                 case 0x081: /* VIS II siam */
5017                     // XXX
5018                     goto illegal_insn;
5019                 default:
5020                     goto illegal_insn;
5021                 }
5022 #else
5023                 goto ncp_insn;
5024 #endif
5025             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5026 #ifdef TARGET_SPARC64
5027                 goto illegal_insn;
5028 #else
5029                 goto ncp_insn;
5030 #endif
5031 #ifdef TARGET_SPARC64
5032             } else if (xop == 0x39) { /* V9 return */
5033                 save_state(dc);
5034                 cpu_src1 = get_src1(dc, insn);
5035                 cpu_tmp0 = tcg_temp_new();
5036                 if (IS_IMM) {   /* immediate */
5037                     simm = GET_FIELDs(insn, 19, 31);
5038                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5039                 } else {                /* register */
5040                     rs2 = GET_FIELD(insn, 27, 31);
5041                     if (rs2) {
5042                         cpu_src2 = gen_load_gpr(dc, rs2);
5043                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5044                     } else {
5045                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5046                     }
5047                 }
5048                 gen_check_align(dc, cpu_tmp0, 3);
5049                 gen_helper_restore(tcg_env);
5050                 gen_mov_pc_npc(dc);
5051                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5052                 dc->npc = DYNAMIC_PC_LOOKUP;
5053                 goto jmp_insn;
5054 #endif
5055             } else {
5056                 cpu_src1 = get_src1(dc, insn);
5057                 cpu_tmp0 = tcg_temp_new();
5058                 if (IS_IMM) {   /* immediate */
5059                     simm = GET_FIELDs(insn, 19, 31);
5060                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5061                 } else {                /* register */
5062                     rs2 = GET_FIELD(insn, 27, 31);
5063                     if (rs2) {
5064                         cpu_src2 = gen_load_gpr(dc, rs2);
5065                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5066                     } else {
5067                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5068                     }
5069                 }
5070                 switch (xop) {
5071                 case 0x38:      /* jmpl */
5072                     {
5073                         gen_check_align(dc, cpu_tmp0, 3);
5074                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5075                         gen_mov_pc_npc(dc);
5076                         gen_address_mask(dc, cpu_tmp0);
5077                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5078                         dc->npc = DYNAMIC_PC_LOOKUP;
5079                     }
5080                     goto jmp_insn;
5081 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5082                 case 0x39:      /* rett, V9 return */
5083                     {
5084                         if (!supervisor(dc))
5085                             goto priv_insn;
5086                         gen_check_align(dc, cpu_tmp0, 3);
5087                         gen_mov_pc_npc(dc);
5088                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5089                         dc->npc = DYNAMIC_PC;
5090                         gen_helper_rett(tcg_env);
5091                     }
5092                     goto jmp_insn;
5093 #endif
5094                 case 0x3b: /* flush */
5095                     /* nop */
5096                     break;
5097                 case 0x3c:      /* save */
5098                     gen_helper_save(tcg_env);
5099                     gen_store_gpr(dc, rd, cpu_tmp0);
5100                     break;
5101                 case 0x3d:      /* restore */
5102                     gen_helper_restore(tcg_env);
5103                     gen_store_gpr(dc, rd, cpu_tmp0);
5104                     break;
5105 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5106                 case 0x3e:      /* V9 done/retry */
5107                     {
5108                         switch (rd) {
5109                         case 0:
5110                             if (!supervisor(dc))
5111                                 goto priv_insn;
5112                             dc->npc = DYNAMIC_PC;
5113                             dc->pc = DYNAMIC_PC;
5114                             translator_io_start(&dc->base);
5115                             gen_helper_done(tcg_env);
5116                             goto jmp_insn;
5117                         case 1:
5118                             if (!supervisor(dc))
5119                                 goto priv_insn;
5120                             dc->npc = DYNAMIC_PC;
5121                             dc->pc = DYNAMIC_PC;
5122                             translator_io_start(&dc->base);
5123                             gen_helper_retry(tcg_env);
5124                             goto jmp_insn;
5125                         default:
5126                             goto illegal_insn;
5127                         }
5128                     }
5129                     break;
5130 #endif
5131                 default:
5132                     goto illegal_insn;
5133                 }
5134             }
5135             break;
5136         }
5137         break;
5138     case 3:                     /* load/store instructions */
5139         {
5140             unsigned int xop = GET_FIELD(insn, 7, 12);
5141             /* ??? gen_address_mask prevents us from using a source
5142                register directly.  Always generate a temporary.  */
5143             TCGv cpu_addr = tcg_temp_new();
5144 
5145             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5146             if (xop == 0x3c || xop == 0x3e) {
5147                 /* V9 casa/casxa : no offset */
5148             } else if (IS_IMM) {     /* immediate */
5149                 simm = GET_FIELDs(insn, 19, 31);
5150                 if (simm != 0) {
5151                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5152                 }
5153             } else {            /* register */
5154                 rs2 = GET_FIELD(insn, 27, 31);
5155                 if (rs2 != 0) {
5156                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5157                 }
5158             }
5159             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5160                 (xop > 0x17 && xop <= 0x1d ) ||
5161                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5162                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5163 
5164                 switch (xop) {
5165                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5166                     gen_address_mask(dc, cpu_addr);
5167                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5168                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5169                     break;
5170                 case 0x1:       /* ldub, load unsigned byte */
5171                     gen_address_mask(dc, cpu_addr);
5172                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5173                                        dc->mem_idx, MO_UB);
5174                     break;
5175                 case 0x2:       /* lduh, load unsigned halfword */
5176                     gen_address_mask(dc, cpu_addr);
5177                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5178                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5179                     break;
5180                 case 0x3:       /* ldd, load double word */
5181                     if (rd & 1)
5182                         goto illegal_insn;
5183                     else {
5184                         TCGv_i64 t64;
5185 
5186                         gen_address_mask(dc, cpu_addr);
5187                         t64 = tcg_temp_new_i64();
5188                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5189                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5190                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5191                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5192                         gen_store_gpr(dc, rd + 1, cpu_val);
5193                         tcg_gen_shri_i64(t64, t64, 32);
5194                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5195                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5196                     }
5197                     break;
5198                 case 0x9:       /* ldsb, load signed byte */
5199                     gen_address_mask(dc, cpu_addr);
5200                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5201                     break;
5202                 case 0xa:       /* ldsh, load signed halfword */
5203                     gen_address_mask(dc, cpu_addr);
5204                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5205                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5206                     break;
5207                 case 0xd:       /* ldstub */
5208                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5209                     break;
5210                 case 0x0f:
5211                     /* swap, swap register with memory. Also atomically */
5212                     cpu_src1 = gen_load_gpr(dc, rd);
5213                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5214                              dc->mem_idx, MO_TEUL);
5215                     break;
5216 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5217                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5218                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5219                     break;
5220                 case 0x11:      /* lduba, load unsigned byte alternate */
5221                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5222                     break;
5223                 case 0x12:      /* lduha, load unsigned halfword alternate */
5224                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5225                     break;
5226                 case 0x13:      /* ldda, load double word alternate */
5227                     if (rd & 1) {
5228                         goto illegal_insn;
5229                     }
5230                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5231                     goto skip_move;
5232                 case 0x19:      /* ldsba, load signed byte alternate */
5233                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5234                     break;
5235                 case 0x1a:      /* ldsha, load signed halfword alternate */
5236                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5237                     break;
5238                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5239                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5240                     break;
5241                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5242                                    atomically */
5243                     cpu_src1 = gen_load_gpr(dc, rd);
5244                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5245                     break;
5246 
5247 #ifndef TARGET_SPARC64
5248                 case 0x30: /* ldc */
5249                 case 0x31: /* ldcsr */
5250                 case 0x33: /* lddc */
5251                     goto ncp_insn;
5252 #endif
5253 #endif
5254 #ifdef TARGET_SPARC64
5255                 case 0x08: /* V9 ldsw */
5256                     gen_address_mask(dc, cpu_addr);
5257                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5258                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5259                     break;
5260                 case 0x0b: /* V9 ldx */
5261                     gen_address_mask(dc, cpu_addr);
5262                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5263                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5264                     break;
5265                 case 0x18: /* V9 ldswa */
5266                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5267                     break;
5268                 case 0x1b: /* V9 ldxa */
5269                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5270                     break;
5271                 case 0x2d: /* V9 prefetch, no effect */
5272                     goto skip_move;
5273                 case 0x30: /* V9 ldfa */
5274                     if (gen_trap_ifnofpu(dc)) {
5275                         goto jmp_insn;
5276                     }
5277                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5278                     gen_update_fprs_dirty(dc, rd);
5279                     goto skip_move;
5280                 case 0x33: /* V9 lddfa */
5281                     if (gen_trap_ifnofpu(dc)) {
5282                         goto jmp_insn;
5283                     }
5284                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5285                     gen_update_fprs_dirty(dc, DFPREG(rd));
5286                     goto skip_move;
5287                 case 0x3d: /* V9 prefetcha, no effect */
5288                     goto skip_move;
5289                 case 0x32: /* V9 ldqfa */
5290                     CHECK_FPU_FEATURE(dc, FLOAT128);
5291                     if (gen_trap_ifnofpu(dc)) {
5292                         goto jmp_insn;
5293                     }
5294                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5295                     gen_update_fprs_dirty(dc, QFPREG(rd));
5296                     goto skip_move;
5297 #endif
5298                 default:
5299                     goto illegal_insn;
5300                 }
5301                 gen_store_gpr(dc, rd, cpu_val);
5302 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5303             skip_move: ;
5304 #endif
5305             } else if (xop >= 0x20 && xop < 0x24) {
5306                 if (gen_trap_ifnofpu(dc)) {
5307                     goto jmp_insn;
5308                 }
5309                 switch (xop) {
5310                 case 0x20:      /* ldf, load fpreg */
5311                     gen_address_mask(dc, cpu_addr);
5312                     cpu_dst_32 = gen_dest_fpr_F(dc);
5313                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5314                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5315                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5316                     break;
5317                 case 0x21:      /* ldfsr, V9 ldxfsr */
5318 #ifdef TARGET_SPARC64
5319                     gen_address_mask(dc, cpu_addr);
5320                     if (rd == 1) {
5321                         TCGv_i64 t64 = tcg_temp_new_i64();
5322                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5323                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5324                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5325                         break;
5326                     }
5327 #endif
5328                     cpu_dst_32 = tcg_temp_new_i32();
5329                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5330                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5331                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5332                     break;
5333                 case 0x22:      /* ldqf, load quad fpreg */
5334                     CHECK_FPU_FEATURE(dc, FLOAT128);
5335                     gen_address_mask(dc, cpu_addr);
5336                     cpu_src1_64 = tcg_temp_new_i64();
5337                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5338                                         MO_TEUQ | MO_ALIGN_4);
5339                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5340                     cpu_src2_64 = tcg_temp_new_i64();
5341                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5342                                         MO_TEUQ | MO_ALIGN_4);
5343                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5344                     break;
5345                 case 0x23:      /* lddf, load double fpreg */
5346                     gen_address_mask(dc, cpu_addr);
5347                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5348                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5349                                         MO_TEUQ | MO_ALIGN_4);
5350                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5351                     break;
5352                 default:
5353                     goto illegal_insn;
5354                 }
5355             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5356                        xop == 0xe || xop == 0x1e) {
5357                 TCGv cpu_val = gen_load_gpr(dc, rd);
5358 
5359                 switch (xop) {
5360                 case 0x4: /* st, store word */
5361                     gen_address_mask(dc, cpu_addr);
5362                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5363                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5364                     break;
5365                 case 0x5: /* stb, store byte */
5366                     gen_address_mask(dc, cpu_addr);
5367                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5368                     break;
5369                 case 0x6: /* sth, store halfword */
5370                     gen_address_mask(dc, cpu_addr);
5371                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5372                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5373                     break;
5374                 case 0x7: /* std, store double word */
5375                     if (rd & 1)
5376                         goto illegal_insn;
5377                     else {
5378                         TCGv_i64 t64;
5379                         TCGv lo;
5380 
5381                         gen_address_mask(dc, cpu_addr);
5382                         lo = gen_load_gpr(dc, rd + 1);
5383                         t64 = tcg_temp_new_i64();
5384                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5385                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5386                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5387                     }
5388                     break;
5389 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5390                 case 0x14: /* sta, V9 stwa, store word alternate */
5391                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5392                     break;
5393                 case 0x15: /* stba, store byte alternate */
5394                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5395                     break;
5396                 case 0x16: /* stha, store halfword alternate */
5397                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5398                     break;
5399                 case 0x17: /* stda, store double word alternate */
5400                     if (rd & 1) {
5401                         goto illegal_insn;
5402                     }
5403                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5404                     break;
5405 #endif
5406 #ifdef TARGET_SPARC64
5407                 case 0x0e: /* V9 stx */
5408                     gen_address_mask(dc, cpu_addr);
5409                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5410                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5411                     break;
5412                 case 0x1e: /* V9 stxa */
5413                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5414                     break;
5415 #endif
5416                 default:
5417                     goto illegal_insn;
5418                 }
5419             } else if (xop > 0x23 && xop < 0x28) {
5420                 if (gen_trap_ifnofpu(dc)) {
5421                     goto jmp_insn;
5422                 }
5423                 switch (xop) {
5424                 case 0x24: /* stf, store fpreg */
5425                     gen_address_mask(dc, cpu_addr);
5426                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5427                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5428                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5429                     break;
5430                 case 0x25: /* stfsr, V9 stxfsr */
5431                     {
5432 #ifdef TARGET_SPARC64
5433                         gen_address_mask(dc, cpu_addr);
5434                         if (rd == 1) {
5435                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5436                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5437                             break;
5438                         }
5439 #endif
5440                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5441                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5442                     }
5443                     break;
5444                 case 0x26:
5445 #ifdef TARGET_SPARC64
5446                     /* V9 stqf, store quad fpreg */
5447                     CHECK_FPU_FEATURE(dc, FLOAT128);
5448                     gen_address_mask(dc, cpu_addr);
5449                     /* ??? While stqf only requires 4-byte alignment, it is
5450                        legal for the cpu to signal the unaligned exception.
5451                        The OS trap handler is then required to fix it up.
5452                        For qemu, this avoids having to probe the second page
5453                        before performing the first write.  */
5454                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5455                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5456                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5457                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5458                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5459                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5460                                         dc->mem_idx, MO_TEUQ);
5461                     break;
5462 #else /* !TARGET_SPARC64 */
5463                     /* stdfq, store floating point queue */
5464 #if defined(CONFIG_USER_ONLY)
5465                     goto illegal_insn;
5466 #else
5467                     if (!supervisor(dc))
5468                         goto priv_insn;
5469                     if (gen_trap_ifnofpu(dc)) {
5470                         goto jmp_insn;
5471                     }
5472                     goto nfq_insn;
5473 #endif
5474 #endif
5475                 case 0x27: /* stdf, store double fpreg */
5476                     gen_address_mask(dc, cpu_addr);
5477                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5478                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5479                                         MO_TEUQ | MO_ALIGN_4);
5480                     break;
5481                 default:
5482                     goto illegal_insn;
5483                 }
5484             } else if (xop > 0x33 && xop < 0x3f) {
5485                 switch (xop) {
5486 #ifdef TARGET_SPARC64
5487                 case 0x34: /* V9 stfa */
5488                     if (gen_trap_ifnofpu(dc)) {
5489                         goto jmp_insn;
5490                     }
5491                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5492                     break;
5493                 case 0x36: /* V9 stqfa */
5494                     {
5495                         CHECK_FPU_FEATURE(dc, FLOAT128);
5496                         if (gen_trap_ifnofpu(dc)) {
5497                             goto jmp_insn;
5498                         }
5499                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5500                     }
5501                     break;
5502                 case 0x37: /* V9 stdfa */
5503                     if (gen_trap_ifnofpu(dc)) {
5504                         goto jmp_insn;
5505                     }
5506                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5507                     break;
5508                 case 0x3e: /* V9 casxa */
5509                     rs2 = GET_FIELD(insn, 27, 31);
5510                     cpu_src2 = gen_load_gpr(dc, rs2);
5511                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5512                     break;
5513 #else
5514                 case 0x34: /* stc */
5515                 case 0x35: /* stcsr */
5516                 case 0x36: /* stdcq */
5517                 case 0x37: /* stdc */
5518                     goto ncp_insn;
5519 #endif
5520 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5521                 case 0x3c: /* V9 or LEON3 casa */
5522 #ifndef TARGET_SPARC64
5523                     CHECK_IU_FEATURE(dc, CASA);
5524 #endif
5525                     rs2 = GET_FIELD(insn, 27, 31);
5526                     cpu_src2 = gen_load_gpr(dc, rs2);
5527                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5528                     break;
5529 #endif
5530                 default:
5531                     goto illegal_insn;
5532                 }
5533             } else {
5534                 goto illegal_insn;
5535             }
5536         }
5537         break;
5538     }
5539     advance_pc(dc);
5540  jmp_insn:
5541     return;
5542  illegal_insn:
5543     gen_exception(dc, TT_ILL_INSN);
5544     return;
5545 #if !defined(CONFIG_USER_ONLY)
5546  priv_insn:
5547     gen_exception(dc, TT_PRIV_INSN);
5548     return;
5549 #endif
5550  nfpu_insn:
5551     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5552     return;
5553 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5554  nfq_insn:
5555     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5556     return;
5557 #endif
5558 #ifndef TARGET_SPARC64
5559  ncp_insn:
5560     gen_exception(dc, TT_NCP_INSN);
5561     return;
5562 #endif
5563 }
5564 
5565 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5566 {
5567     DisasContext *dc = container_of(dcbase, DisasContext, base);
5568     CPUSPARCState *env = cpu_env(cs);
5569     int bound;
5570 
5571     dc->pc = dc->base.pc_first;
5572     dc->npc = (target_ulong)dc->base.tb->cs_base;
5573     dc->cc_op = CC_OP_DYNAMIC;
5574     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5575     dc->def = &env->def;
5576     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5577     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5578 #ifndef CONFIG_USER_ONLY
5579     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5580 #endif
5581 #ifdef TARGET_SPARC64
5582     dc->fprs_dirty = 0;
5583     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5584 #ifndef CONFIG_USER_ONLY
5585     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5586 #endif
5587 #endif
5588     /*
5589      * if we reach a page boundary, we stop generation so that the
5590      * PC of a TT_TFAULT exception is always in the right page
5591      */
5592     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5593     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5594 }
5595 
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* No per-TB setup is required for SPARC.  */
}
5599 
5600 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5601 {
5602     DisasContext *dc = container_of(dcbase, DisasContext, base);
5603     target_ulong npc = dc->npc;
5604 
5605     if (npc & 3) {
5606         switch (npc) {
5607         case JUMP_PC:
5608             assert(dc->jump_pc[1] == dc->pc + 4);
5609             npc = dc->jump_pc[0] | JUMP_PC;
5610             break;
5611         case DYNAMIC_PC:
5612         case DYNAMIC_PC_LOOKUP:
5613             npc = DYNAMIC_PC;
5614             break;
5615         default:
5616             g_assert_not_reached();
5617         }
5618     }
5619     tcg_gen_insn_start(dc->pc, npc);
5620 }
5621 
5622 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5623 {
5624     DisasContext *dc = container_of(dcbase, DisasContext, base);
5625     CPUSPARCState *env = cpu_env(cs);
5626     unsigned int insn;
5627 
5628     insn = translator_ldl(env, &dc->base, dc->pc);
5629     dc->base.pc_next += 4;
5630 
5631     if (!decode(dc, insn)) {
5632         disas_sparc_legacy(dc, insn);
5633     }
5634 
5635     if (dc->base.is_jmp == DISAS_NORETURN) {
5636         return;
5637     }
5638     if (dc->pc != dc->base.pc_next) {
5639         dc->base.is_jmp = DISAS_TOO_MANY;
5640     }
5641 }
5642 
/*
 * Emit the TB epilogue: dispose of the final pc/npc state, then emit
 * the out-of-line exception stubs queued on dc->delay_excp_list.
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of pc/npc carries a dynamic tag in its low bits.
         * Materialize whatever is static, and decide whether we may do
         * an in-TCG TB lookup (goto_ptr) or must exit to the main loop
         * (DYNAMIC_PC forbids the lookup; see its definition above).
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* two possible npc values: resolve via the branch cond */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /* Emit the deferred exception paths and free the list as we go.  */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        /* npc values tagged with the dynamic markers are not 4-aligned;
           only store npc here when it is a real (static) address.  */
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5722 
/* Dump the guest disassembly of this TB, headed by its entry symbol.  */
static void sparc_tr_disas_log(const DisasContextBase *dcbase,
                               CPUState *cpu, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
}
5729 
/* Translator callbacks wired into the generic translator_loop().  */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5738 
/* Entry point for TB translation: run the generic translator loop
   with a zero-initialized SPARC DisasContext; per-TB fields are set
   up in sparc_tr_init_disas_context.  */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext dc = {};

    translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
}
5746 
5747 void sparc_tcg_init(void)
5748 {
5749     static const char gregnames[32][4] = {
5750         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5751         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5752         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5753         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5754     };
5755     static const char fregnames[32][4] = {
5756         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5757         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5758         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5759         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5760     };
5761 
5762     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5763 #ifdef TARGET_SPARC64
5764         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5765         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5766 #else
5767         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5768 #endif
5769         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5770         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5771     };
5772 
5773     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5774 #ifdef TARGET_SPARC64
5775         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5776         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5777         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5778         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5779           "hstick_cmpr" },
5780         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5781         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5782         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5783         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5784         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5785 #endif
5786         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5787         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5788         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5789         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5790         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5791         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5792         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5793         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5794 #ifndef CONFIG_USER_ONLY
5795         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5796 #endif
5797     };
5798 
5799     unsigned int i;
5800 
5801     cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
5802                                          offsetof(CPUSPARCState, regwptr),
5803                                          "regwptr");
5804 
5805     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5806         *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
5807     }
5808 
5809     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5810         *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
5811     }
5812 
5813     cpu_regs[0] = NULL;
5814     for (i = 1; i < 8; ++i) {
5815         cpu_regs[i] = tcg_global_mem_new(tcg_env,
5816                                          offsetof(CPUSPARCState, gregs[i]),
5817                                          gregnames[i]);
5818     }
5819 
5820     for (i = 8; i < 32; ++i) {
5821         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5822                                          (i - 8) * sizeof(target_ulong),
5823                                          gregnames[i]);
5824     }
5825 
5826     for (i = 0; i < TARGET_DPREGS; i++) {
5827         cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
5828                                             offsetof(CPUSPARCState, fpr[i]),
5829                                             fregnames[i]);
5830     }
5831 }
5832 
5833 void sparc_restore_state_to_opc(CPUState *cs,
5834                                 const TranslationBlock *tb,
5835                                 const uint64_t *data)
5836 {
5837     SPARCCPU *cpu = SPARC_CPU(cs);
5838     CPUSPARCState *env = &cpu->env;
5839     target_ulong pc = data[0];
5840     target_ulong npc = data[1];
5841 
5842     env->pc = pc;
5843     if (npc == DYNAMIC_PC) {
5844         /* dynamic NPC: already stored */
5845     } else if (npc & JUMP_PC) {
5846         /* jump PC: use 'cond' and the jump targets of the translation */
5847         if (env->cond) {
5848             env->npc = npc & ~3;
5849         } else {
5850             env->npc = pc + 4;
5851         }
5852     } else {
5853         env->npc = npc;
5854     }
5855 }
5856