xref: /openbmc/qemu/target/sparc/translate.c (revision 89aafcf2)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "exec/translator.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 
/* Sentinel values stored in dc->pc / dc->npc instead of a real address.
   Real instruction addresses are 4-byte aligned, so 1 and 2 cannot
   collide with them. */
#define DYNAMIC_PC  1 /* dynamic pc value */
#define JUMP_PC     2 /* dynamic pc value which takes only two values
                         according to jump_pc[T2] */

#define DISAS_EXIT  DISAS_TARGET_0
42 
/* global register indexes */
static TCGv_ptr cpu_regwptr;        /* pointer to the current register window */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst; /* lazy condition-code state */
static TCGv_i32 cpu_cc_op;          /* which CC_OP_* the lazy state encodes */
static TCGv_i32 cpu_psr;            /* materialized PSR flags */
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];           /* %g0-%g7 plus current window regs */
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
static TCGv cpu_cond;               /* result of the last evaluated condition */
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers, stored as 64-bit pairs of 32-bit singles */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
65 
66 #include "exec/gen-icount.h"
67 
/* Per-translation-block disassembly state. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;        /* MMU index for memory accesses */
    bool fpu_enabled;
    bool address_mask_32bit;
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS bits already known to be set in this TB */
    int asi;         /* ASI field of the current instruction */
#endif
} DisasContext;

/* A comparison for a conditional operation: cond applied to (c1, c2).
   If is_bool, c1 is already a 0/1 value compared against c2 == 0. */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
96 
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the above field extractors. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/* Map a double/quad FP register number to its even base register.
   On sparc64 the low bit of the encoding selects registers 32..63. */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
118 
/*
 * Sign-extend the low @len bits of @x to a full int.
 * The left shift is done on an unsigned value: left-shifting a signed
 * value into or past the sign bit is undefined behavior in C
 * (CERT INT32-C), while the unsigned shift followed by an arithmetic
 * right shift is well defined on QEMU's two's-complement hosts.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
124 
/* Bit 13 of the instruction word: set when the second operand is an
   immediate rather than a register. */
#define IS_IMM (insn & (1<<13))
126 
/* Mark the FPRS dirty bit covering FP register @rd (DL for regs < 32,
   DU otherwise).  No-op on 32-bit SPARC, which has no FPRS register. */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
139 
140 /* floating point registers moves */
141 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
142 {
143     TCGv_i32 ret = tcg_temp_new_i32();
144     if (src & 1) {
145         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
146     } else {
147         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
148     }
149     return ret;
150 }
151 
/* Store i32 value @v into single-precision FP register @dst, depositing
   it into the proper half of the containing i64 pair, and mark FPRS dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    /* Even singles occupy bits 63:32, odd singles bits 31:0. */
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
161 
/* Allocate a temp to receive a single-precision result; the caller
   commits it with gen_store_fpr_F(). */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
166 
167 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
168 {
169     src = DFPREG(src);
170     return cpu_fpr[src / 2];
171 }
172 
/* Store i64 value @v into double-precision FP register @dst and mark
   the corresponding FPRS dirty bit. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
179 
/* Return the i64 destination for double-precision FP register @dst.
   NB: writes through this value land directly in the register; the
   caller is expected to call gen_update_fprs_dirty() itself. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
184 
/* Copy quad FP register pair @src into the env scratch area qt0, used
   to pass 128-bit values to helpers. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Same as above, but into the second scratch slot qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy a 128-bit helper result back from qt0 into quad register @dst. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
208 
/* Store the i64 pair (@v1 high, @v2 low) into quad FP register @dst
   and mark FPRS dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
218 
219 #ifdef TARGET_SPARC64
/* Return the high i64 half of quad FP register @src (no copy). */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the low i64 half of quad FP register @src (no copy). */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad FP register @rs to @rd and mark FPRS dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
241 #endif
242 
243 /* moves */
/* Privilege predicates: user-mode emulation is never privileged; on
   sparc64 hypervisor mode implies supervisor. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* True when 32-bit address masking applies (presumably PSTATE.AM --
   confirm); always true for the 32-bit ABI. */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
265 
266 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
267 {
268 #ifdef TARGET_SPARC64
269     if (AM_CHECK(dc))
270         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
271 #endif
272 }
273 
274 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
275 {
276     if (reg > 0) {
277         assert(reg < 32);
278         return cpu_regs[reg];
279     } else {
280         TCGv t = tcg_temp_new();
281         tcg_gen_movi_tl(t, 0);
282         return t;
283     }
284 }
285 
286 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
287 {
288     if (reg > 0) {
289         assert(reg < 32);
290         tcg_gen_mov_tl(cpu_regs[reg], v);
291     }
292 }
293 
294 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
295 {
296     if (reg > 0) {
297         assert(reg < 32);
298         return cpu_regs[reg];
299     } else {
300         return tcg_temp_new();
301     }
302 }
303 
/* True when both branch targets may be reached with a direct TB link. */
static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
{
    return translator_use_goto_tb(&s->base, pc) &&
           translator_use_goto_tb(&s->base, npc);
}
309 
/* Emit a jump to (pc, npc), chaining to the next TB via slot @tb_num
   when a direct link is allowed, otherwise exiting to the main loop. */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(NULL, 0);
    }
}
326 
// XXX suboptimal
/* Extract one PSR flag (N/Z/V/C) from the i32 flags word @src into
   @reg as a 0/1 target-long value. */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
351 
/* dst = src1 + src2, recording operands and result in the lazy CC
   globals so flags can be computed later. */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
359 
/* Recover the 32-bit carry out of the previous CC_OP_ADD* operation. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Truncate the 64-bit CC values to their low 32 bits first. */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}

/* Recover the 32-bit borrow out of the previous CC_OP_SUB* operation. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
401 
/*
 * dst = src1 + src2 + carry (ADDX/ADDXcc).
 * The carry-in is recovered from the lazily-tracked condition codes:
 * depending on dc->cc_op it is known zero, derivable from the previous
 * add/sub, or must be computed by the compute_C_icc helper.  When
 * @update_cc is set, the operands and result are recorded and cc_op
 * becomes CC_OP_ADDX.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to a target long where necessary. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
466 
/* dst = src1 - src2, recording operands and result in the lazy CC
   globals so flags can be computed later. */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
474 
/*
 * dst = src1 - src2 - carry (SUBX/SUBXcc), mirror image of
 * gen_op_addx_int(): the borrow-in is recovered from the lazy CC state
 * or, failing that, from the compute_C_icc helper.  When @update_cc is
 * set, cc_op becomes CC_OP_SUBX.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit borrow to a target long where necessary. */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
539 
/*
 * One step of the SPARC MULScc (multiply step) instruction:
 * conditionally add src2, shift the Y register and the partial product
 * right by one, injecting N^V at the top.  Operands and result are left
 * in the lazy CC globals for the caller to commit.
 */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
578 
/*
 * 32x32 -> 64 multiply of the low halves of src1/src2 (signed when
 * @sign_ext is set).  The low 32 bits of the product go to @dst and the
 * high 32 bits to the Y register.
 */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    /* dst holds the full 64-bit product; Y gets the high half. */
    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
603 
/* UMUL: unsigned 32x32 multiply via gen_op_multiply(). */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply via gen_op_multiply(). */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
615 
/*
 * Integer branch condition evaluators: compute 0 or 1 in @dst from the
 * PSR-format flags word @src.  The short comment before each helper
 * gives the condition in terms of the N/Z/V/C flags.
 */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
729 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the fcc field selected by @fcc_offset from the FSR
   value @src into @reg as 0/1. */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* Same for FCC1. */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
750 
/*
 * Floating-point branch condition evaluators: compute 0 or 1 in @dst
 * from the FSR value @src; @fcc_offset selects which fcc field.  The
 * comment before each helper lists the FCC values (0 =, 1 <, 2 >,
 * 3 unordered) for which the condition holds.
 */

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
885 
/* Two-way conditional exit: go to pc1 when r_cond != 0, else pc2.
   Both paths end the TB via gen_goto_tb(). */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
898 
/* Annulled conditional branch: when cpu_cond != 0 execute the delay
   slot at npc then jump to pc1; otherwise skip the delay slot entirely
   (continue at npc + 4).  Ends the TB. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
913 
/* Non-annulled conditional branch: npc becomes pc1 or npc + 4 depending
   on cpu_cond.  When npc is known statically, defer the choice by
   recording both targets (JUMP_PC); otherwise emit a movcond now. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;       /* taken target */
        dc->jump_pc[1] = npc + 4;   /* fall-through target */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_constant_tl(pc1);
        z = tcg_constant_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);

        dc->pc = DYNAMIC_PC;
    }
}
936 
/* Resolve a pending JUMP_PC: select npc from jump_pc[0]/jump_pc[1]
   according to cpu_cond. */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
945 
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
955 
/* Make the runtime cpu_npc match dc->npc: resolve a pending JUMP_PC,
   or store a statically-known value; nothing to do if already dynamic. */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
965 
/* Materialize the PSR flags from the lazy CC state if needed. */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
973 
/* Flush pc and npc to the CPU state, e.g. before a helper that may
   raise an exception. */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
979 
/* Raise exception @which at the current pc/npc and end the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
986 
/* Emit a runtime alignment check of @addr against @mask; the helper
   raises an exception on misalignment. */
static void gen_check_align(TCGv addr, int mask)
{
    gen_helper_check_align(cpu_env, addr, tcg_constant_i32(mask));
}
991 
992 static inline void gen_mov_pc_npc(DisasContext *dc)
993 {
994     if (dc->npc == JUMP_PC) {
995         gen_generic_branch(dc);
996         tcg_gen_mov_tl(cpu_pc, cpu_npc);
997         dc->pc = DYNAMIC_PC;
998     } else if (dc->npc == DYNAMIC_PC) {
999         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1000         dc->pc = DYNAMIC_PC;
1001     } else {
1002         dc->pc = dc->npc;
1003     }
1004 }
1005 
/* Emit the default sequential advance: pc = npc, npc += 4. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1011 
1012 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1013                         DisasContext *dc)
1014 {
1015     static int subcc_cond[16] = {
1016         TCG_COND_NEVER,
1017         TCG_COND_EQ,
1018         TCG_COND_LE,
1019         TCG_COND_LT,
1020         TCG_COND_LEU,
1021         TCG_COND_LTU,
1022         -1, /* neg */
1023         -1, /* overflow */
1024         TCG_COND_ALWAYS,
1025         TCG_COND_NE,
1026         TCG_COND_GT,
1027         TCG_COND_GE,
1028         TCG_COND_GTU,
1029         TCG_COND_GEU,
1030         -1, /* pos */
1031         -1, /* no overflow */
1032     };
1033 
1034     static int logic_cond[16] = {
1035         TCG_COND_NEVER,
1036         TCG_COND_EQ,     /* eq:  Z */
1037         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1038         TCG_COND_LT,     /* lt:  N ^ V -> N */
1039         TCG_COND_EQ,     /* leu: C | Z -> Z */
1040         TCG_COND_NEVER,  /* ltu: C -> 0 */
1041         TCG_COND_LT,     /* neg: N */
1042         TCG_COND_NEVER,  /* vs:  V -> 0 */
1043         TCG_COND_ALWAYS,
1044         TCG_COND_NE,     /* ne:  !Z */
1045         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1046         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1047         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1048         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1049         TCG_COND_GE,     /* pos: !N */
1050         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1051     };
1052 
1053     TCGv_i32 r_src;
1054     TCGv r_dst;
1055 
1056 #ifdef TARGET_SPARC64
1057     if (xcc) {
1058         r_src = cpu_xcc;
1059     } else {
1060         r_src = cpu_psr;
1061     }
1062 #else
1063     r_src = cpu_psr;
1064 #endif
1065 
1066     switch (dc->cc_op) {
1067     case CC_OP_LOGIC:
1068         cmp->cond = logic_cond[cond];
1069     do_compare_dst_0:
1070         cmp->is_bool = false;
1071         cmp->c2 = tcg_constant_tl(0);
1072 #ifdef TARGET_SPARC64
1073         if (!xcc) {
1074             cmp->c1 = tcg_temp_new();
1075             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1076             break;
1077         }
1078 #endif
1079         cmp->c1 = cpu_cc_dst;
1080         break;
1081 
1082     case CC_OP_SUB:
1083         switch (cond) {
1084         case 6:  /* neg */
1085         case 14: /* pos */
1086             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1087             goto do_compare_dst_0;
1088 
1089         case 7: /* overflow */
1090         case 15: /* !overflow */
1091             goto do_dynamic;
1092 
1093         default:
1094             cmp->cond = subcc_cond[cond];
1095             cmp->is_bool = false;
1096 #ifdef TARGET_SPARC64
1097             if (!xcc) {
1098                 /* Note that sign-extension works for unsigned compares as
1099                    long as both operands are sign-extended.  */
1100                 cmp->c1 = tcg_temp_new();
1101                 cmp->c2 = tcg_temp_new();
1102                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1103                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1104                 break;
1105             }
1106 #endif
1107             cmp->c1 = cpu_cc_src;
1108             cmp->c2 = cpu_cc_src2;
1109             break;
1110         }
1111         break;
1112 
1113     default:
1114     do_dynamic:
1115         gen_helper_compute_psr(cpu_env);
1116         dc->cc_op = CC_OP_FLAGS;
1117         /* FALLTHRU */
1118 
1119     case CC_OP_FLAGS:
1120         /* We're going to generate a boolean result.  */
1121         cmp->cond = TCG_COND_NE;
1122         cmp->is_bool = true;
1123         cmp->c1 = r_dst = tcg_temp_new();
1124         cmp->c2 = tcg_constant_tl(0);
1125 
1126         switch (cond) {
1127         case 0x0:
1128             gen_op_eval_bn(r_dst);
1129             break;
1130         case 0x1:
1131             gen_op_eval_be(r_dst, r_src);
1132             break;
1133         case 0x2:
1134             gen_op_eval_ble(r_dst, r_src);
1135             break;
1136         case 0x3:
1137             gen_op_eval_bl(r_dst, r_src);
1138             break;
1139         case 0x4:
1140             gen_op_eval_bleu(r_dst, r_src);
1141             break;
1142         case 0x5:
1143             gen_op_eval_bcs(r_dst, r_src);
1144             break;
1145         case 0x6:
1146             gen_op_eval_bneg(r_dst, r_src);
1147             break;
1148         case 0x7:
1149             gen_op_eval_bvs(r_dst, r_src);
1150             break;
1151         case 0x8:
1152             gen_op_eval_ba(r_dst);
1153             break;
1154         case 0x9:
1155             gen_op_eval_bne(r_dst, r_src);
1156             break;
1157         case 0xa:
1158             gen_op_eval_bg(r_dst, r_src);
1159             break;
1160         case 0xb:
1161             gen_op_eval_bge(r_dst, r_src);
1162             break;
1163         case 0xc:
1164             gen_op_eval_bgu(r_dst, r_src);
1165             break;
1166         case 0xd:
1167             gen_op_eval_bcc(r_dst, r_src);
1168             break;
1169         case 0xe:
1170             gen_op_eval_bpos(r_dst, r_src);
1171             break;
1172         case 0xf:
1173             gen_op_eval_bvc(r_dst, r_src);
1174             break;
1175         }
1176         break;
1177     }
1178 }
1179 
/*
 * Fill in *cmp with a comparison equivalent to floating-point condition
 * COND (fcond field, 0..15) on %fsr condition field CC (fcc0..fcc3).
 * The result is always materialized as a boolean in a fresh temporary.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Bit offset of the selected fcc field relative to fcc0 within
       %fsr (fcc0 at bits 11:10, fcc1..fcc3 at 33:32, 35:34, 37:36),
       passed to the gen_op_eval_fb* helpers below.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1258 
1259 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1260                      DisasContext *dc)
1261 {
1262     DisasCompare cmp;
1263     gen_compare(&cmp, cc, cond, dc);
1264 
1265     /* The interface is to return a boolean in r_dst.  */
1266     if (cmp.is_bool) {
1267         tcg_gen_mov_tl(r_dst, cmp.c1);
1268     } else {
1269         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1270     }
1271 }
1272 
1273 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1274 {
1275     DisasCompare cmp;
1276     gen_fcompare(&cmp, cc, cond);
1277 
1278     /* The interface is to return a boolean in r_dst.  */
1279     if (cmp.is_bool) {
1280         tcg_gen_mov_tl(r_dst, cmp.c1);
1281     } else {
1282         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1283     }
1284 }
1285 
1286 #ifdef TARGET_SPARC64
/* Map the 3-bit register-branch condition (BPr/MOVr/FMOVr rcond field)
   to a TCG condition.  The table stores the INVERSE condition; users
   undo this with tcg_invert_cond().  The -1 slots (rcond 0 and 4) are
   reserved encodings and must never be looked up.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1298 
1299 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1300 {
1301     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1302     cmp->is_bool = false;
1303     cmp->c1 = r_src;
1304     cmp->c2 = tcg_constant_tl(0);
1305 }
1306 
1307 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1308 {
1309     DisasCompare cmp;
1310     gen_compare_reg(&cmp, cond, r_src);
1311 
1312     /* The interface is to return a boolean in r_dst.  */
1313     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1314 }
1315 #endif
1316 
/*
 * Translate an integer conditional branch (Bicc/BPcc).  OFFSET is the
 * sign-extended displacement, CC selects icc/xcc.  Bit 29 of INSN is
 * the annul bit: when set on a not-taken (or conditional) branch, the
 * delay-slot instruction at npc is skipped.
 */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask (PSTATE.AM) active, targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled taken branch: jump directly, skipping the slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            /* Execute the delay slot, then continue at the target.  */
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate the condition into cpu_cond and
           let the branch helpers emit the two-way control flow.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1356 
/*
 * Translate a floating-point conditional branch (FBfcc/FBPfcc).
 * Identical in structure to do_branch(), but the condition comes from
 * %fsr condition field CC via gen_fcond().  Bit 29 of INSN is the
 * annul bit.
 */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With the address mask (PSTATE.AM) active, targets wrap to 32 bits.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled taken branch: jump directly, skipping the slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            /* Execute the delay slot, then continue at the target.  */
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Truly conditional: evaluate the fp condition into cpu_cond and
           let the branch helpers emit the two-way control flow.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1396 
1397 #ifdef TARGET_SPARC64
1398 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1399                           TCGv r_reg)
1400 {
1401     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1402     target_ulong target = dc->pc + offset;
1403 
1404     if (unlikely(AM_CHECK(dc))) {
1405         target &= 0xffffffffULL;
1406     }
1407     flush_cond(dc);
1408     gen_cond_reg(cpu_cond, cond, r_reg);
1409     if (a) {
1410         gen_branch_a(dc, target);
1411     } else {
1412         gen_branch_n(dc, target);
1413     }
1414 }
1415 
1416 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1417 {
1418     switch (fccno) {
1419     case 0:
1420         gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
1421         break;
1422     case 1:
1423         gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1424         break;
1425     case 2:
1426         gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1427         break;
1428     case 3:
1429         gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1430         break;
1431     }
1432 }
1433 
1434 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1435 {
1436     switch (fccno) {
1437     case 0:
1438         gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
1439         break;
1440     case 1:
1441         gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1442         break;
1443     case 2:
1444         gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1445         break;
1446     case 3:
1447         gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1448         break;
1449     }
1450 }
1451 
1452 static inline void gen_op_fcmpq(int fccno)
1453 {
1454     switch (fccno) {
1455     case 0:
1456         gen_helper_fcmpq(cpu_fsr, cpu_env);
1457         break;
1458     case 1:
1459         gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
1460         break;
1461     case 2:
1462         gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
1463         break;
1464     case 3:
1465         gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
1466         break;
1467     }
1468 }
1469 
1470 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1471 {
1472     switch (fccno) {
1473     case 0:
1474         gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
1475         break;
1476     case 1:
1477         gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1478         break;
1479     case 2:
1480         gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1481         break;
1482     case 3:
1483         gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1484         break;
1485     }
1486 }
1487 
1488 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1489 {
1490     switch (fccno) {
1491     case 0:
1492         gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
1493         break;
1494     case 1:
1495         gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1496         break;
1497     case 2:
1498         gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1499         break;
1500     case 3:
1501         gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1502         break;
1503     }
1504 }
1505 
1506 static inline void gen_op_fcmpeq(int fccno)
1507 {
1508     switch (fccno) {
1509     case 0:
1510         gen_helper_fcmpeq(cpu_fsr, cpu_env);
1511         break;
1512     case 1:
1513         gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
1514         break;
1515     case 2:
1516         gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
1517         break;
1518     case 3:
1519         gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
1520         break;
1521     }
1522 }
1523 
1524 #else
1525 
/* Pre-v9 has a single fcc field, so FCCNO is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1530 
/* Pre-v9 has a single fcc field, so FCCNO is ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1535 
/* Pre-v9 has a single fcc field, so FCCNO is ignored.  Quad operands
   are taken from the QT0/QT1 staging registers.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1540 
/* Pre-v9 has a single fcc field, so FCCNO is ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1545 
/* Pre-v9 has a single fcc field, so FCCNO is ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1550 
/* Pre-v9 has a single fcc field, so FCCNO is ignored.  Quad operands
   are taken from the QT0/QT1 staging registers.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1555 #endif
1556 
/* Replace the FTT field of %fsr with FSR_FLAGS and raise an fp
   exception trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1563 
1564 static int gen_trap_ifnofpu(DisasContext *dc)
1565 {
1566 #if !defined(CONFIG_USER_ONLY)
1567     if (!dc->fpu_enabled) {
1568         gen_exception(dc, TT_NFPU_INSN);
1569         return 1;
1570     }
1571 #endif
1572     return 0;
1573 }
1574 
/* Clear the FTT and current-exception (cexc) fields of %fsr.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1579 
1580 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1581                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1582 {
1583     TCGv_i32 dst, src;
1584 
1585     src = gen_load_fpr_F(dc, rs);
1586     dst = gen_dest_fpr_F(dc);
1587 
1588     gen(dst, cpu_env, src);
1589     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1590 
1591     gen_store_fpr_F(dc, rd, dst);
1592 }
1593 
1594 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1595                                  void (*gen)(TCGv_i32, TCGv_i32))
1596 {
1597     TCGv_i32 dst, src;
1598 
1599     src = gen_load_fpr_F(dc, rs);
1600     dst = gen_dest_fpr_F(dc);
1601 
1602     gen(dst, src);
1603 
1604     gen_store_fpr_F(dc, rd, dst);
1605 }
1606 
1607 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1608                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1609 {
1610     TCGv_i32 dst, src1, src2;
1611 
1612     src1 = gen_load_fpr_F(dc, rs1);
1613     src2 = gen_load_fpr_F(dc, rs2);
1614     dst = gen_dest_fpr_F(dc);
1615 
1616     gen(dst, cpu_env, src1, src2);
1617     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1618 
1619     gen_store_fpr_F(dc, rd, dst);
1620 }
1621 
1622 #ifdef TARGET_SPARC64
1623 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1624                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1625 {
1626     TCGv_i32 dst, src1, src2;
1627 
1628     src1 = gen_load_fpr_F(dc, rs1);
1629     src2 = gen_load_fpr_F(dc, rs2);
1630     dst = gen_dest_fpr_F(dc);
1631 
1632     gen(dst, src1, src2);
1633 
1634     gen_store_fpr_F(dc, rd, dst);
1635 }
1636 #endif
1637 
1638 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1639                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1640 {
1641     TCGv_i64 dst, src;
1642 
1643     src = gen_load_fpr_D(dc, rs);
1644     dst = gen_dest_fpr_D(dc, rd);
1645 
1646     gen(dst, cpu_env, src);
1647     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1648 
1649     gen_store_fpr_D(dc, rd, dst);
1650 }
1651 
1652 #ifdef TARGET_SPARC64
1653 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1654                                  void (*gen)(TCGv_i64, TCGv_i64))
1655 {
1656     TCGv_i64 dst, src;
1657 
1658     src = gen_load_fpr_D(dc, rs);
1659     dst = gen_dest_fpr_D(dc, rd);
1660 
1661     gen(dst, src);
1662 
1663     gen_store_fpr_D(dc, rd, dst);
1664 }
1665 #endif
1666 
1667 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1668                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1669 {
1670     TCGv_i64 dst, src1, src2;
1671 
1672     src1 = gen_load_fpr_D(dc, rs1);
1673     src2 = gen_load_fpr_D(dc, rs2);
1674     dst = gen_dest_fpr_D(dc, rd);
1675 
1676     gen(dst, cpu_env, src1, src2);
1677     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1678 
1679     gen_store_fpr_D(dc, rd, dst);
1680 }
1681 
1682 #ifdef TARGET_SPARC64
1683 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1684                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1685 {
1686     TCGv_i64 dst, src1, src2;
1687 
1688     src1 = gen_load_fpr_D(dc, rs1);
1689     src2 = gen_load_fpr_D(dc, rs2);
1690     dst = gen_dest_fpr_D(dc, rd);
1691 
1692     gen(dst, src1, src2);
1693 
1694     gen_store_fpr_D(dc, rd, dst);
1695 }
1696 
1697 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1698                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1699 {
1700     TCGv_i64 dst, src1, src2;
1701 
1702     src1 = gen_load_fpr_D(dc, rs1);
1703     src2 = gen_load_fpr_D(dc, rs2);
1704     dst = gen_dest_fpr_D(dc, rd);
1705 
1706     gen(dst, cpu_gsr, src1, src2);
1707 
1708     gen_store_fpr_D(dc, rd, dst);
1709 }
1710 
1711 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1712                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1713 {
1714     TCGv_i64 dst, src0, src1, src2;
1715 
1716     src1 = gen_load_fpr_D(dc, rs1);
1717     src2 = gen_load_fpr_D(dc, rs2);
1718     src0 = gen_load_fpr_D(dc, rd);
1719     dst = gen_dest_fpr_D(dc, rd);
1720 
1721     gen(dst, src0, src1, src2);
1722 
1723     gen_store_fpr_D(dc, rd, dst);
1724 }
1725 #endif
1726 
/* Quad-precision unary op, with IEEE exception check.  Quad values
   move through the QT0/QT1 staging registers rather than TCG temps:
   the operand is loaded into QT1, the helper leaves its result in
   QT0, which is then stored to rd.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1738 
1739 #ifdef TARGET_SPARC64
/* Quad-precision unary op without IEEE exception check: operand in
   QT1, result taken from QT0 and stored to rd.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1750 #endif
1751 
/* Quad-precision binary op, with IEEE exception check: operands are
   loaded into QT0 (rs1) and QT1 (rs2), the helper leaves its result
   in QT0, which is then stored to rd.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1764 
1765 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1766                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1767 {
1768     TCGv_i64 dst;
1769     TCGv_i32 src1, src2;
1770 
1771     src1 = gen_load_fpr_F(dc, rs1);
1772     src2 = gen_load_fpr_F(dc, rs2);
1773     dst = gen_dest_fpr_D(dc, rd);
1774 
1775     gen(dst, cpu_env, src1, src2);
1776     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1777 
1778     gen_store_fpr_D(dc, rd, dst);
1779 }
1780 
1781 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1782                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1783 {
1784     TCGv_i64 src1, src2;
1785 
1786     src1 = gen_load_fpr_D(dc, rs1);
1787     src2 = gen_load_fpr_D(dc, rs2);
1788 
1789     gen(cpu_env, src1, src2);
1790     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1791 
1792     gen_op_store_QT0_fpr(QFPREG(rd));
1793     gen_update_fprs_dirty(dc, QFPREG(rd));
1794 }
1795 
1796 #ifdef TARGET_SPARC64
1797 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1798                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1799 {
1800     TCGv_i64 dst;
1801     TCGv_i32 src;
1802 
1803     src = gen_load_fpr_F(dc, rs);
1804     dst = gen_dest_fpr_D(dc, rd);
1805 
1806     gen(dst, cpu_env, src);
1807     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1808 
1809     gen_store_fpr_D(dc, rd, dst);
1810 }
1811 #endif
1812 
1813 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1814                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1815 {
1816     TCGv_i64 dst;
1817     TCGv_i32 src;
1818 
1819     src = gen_load_fpr_F(dc, rs);
1820     dst = gen_dest_fpr_D(dc, rd);
1821 
1822     gen(dst, cpu_env, src);
1823 
1824     gen_store_fpr_D(dc, rd, dst);
1825 }
1826 
1827 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1828                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1829 {
1830     TCGv_i32 dst;
1831     TCGv_i64 src;
1832 
1833     src = gen_load_fpr_D(dc, rs);
1834     dst = gen_dest_fpr_F(dc);
1835 
1836     gen(dst, cpu_env, src);
1837     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1838 
1839     gen_store_fpr_F(dc, rd, dst);
1840 }
1841 
1842 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1843                               void (*gen)(TCGv_i32, TCGv_ptr))
1844 {
1845     TCGv_i32 dst;
1846 
1847     gen_op_load_fpr_QT1(QFPREG(rs));
1848     dst = gen_dest_fpr_F(dc);
1849 
1850     gen(dst, cpu_env);
1851     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1852 
1853     gen_store_fpr_F(dc, rd, dst);
1854 }
1855 
1856 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1857                               void (*gen)(TCGv_i64, TCGv_ptr))
1858 {
1859     TCGv_i64 dst;
1860 
1861     gen_op_load_fpr_QT1(QFPREG(rs));
1862     dst = gen_dest_fpr_D(dc, rd);
1863 
1864     gen(dst, cpu_env);
1865     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1866 
1867     gen_store_fpr_D(dc, rd, dst);
1868 }
1869 
1870 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1871                                  void (*gen)(TCGv_ptr, TCGv_i32))
1872 {
1873     TCGv_i32 src;
1874 
1875     src = gen_load_fpr_F(dc, rs);
1876 
1877     gen(cpu_env, src);
1878 
1879     gen_op_store_QT0_fpr(QFPREG(rd));
1880     gen_update_fprs_dirty(dc, QFPREG(rd));
1881 }
1882 
1883 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1884                                  void (*gen)(TCGv_ptr, TCGv_i64))
1885 {
1886     TCGv_i64 src;
1887 
1888     src = gen_load_fpr_D(dc, rs);
1889 
1890     gen(cpu_env, src);
1891 
1892     gen_op_store_QT0_fpr(QFPREG(rd));
1893     gen_update_fprs_dirty(dc, QFPREG(rd));
1894 }
1895 
/* SWAP: atomically exchange SRC with memory at ADDR, with the old
   memory value returned in DST.  The access is aligned.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1902 
1903 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1904 {
1905     TCGv m1 = tcg_constant_tl(0xff);
1906     gen_address_mask(dc, addr);
1907     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1908 }
1909 
1910 /* asi moves */
1911 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How a decoded ASI access should be expanded.  */
typedef enum {
    GET_ASI_HELPER,  /* fall back to the generic ld/st_asi helpers */
    GET_ASI_EXCP,    /* an exception was generated; emit no access */
    GET_ASI_DIRECT,  /* direct qemu_ld/st with the resolved mem_idx */
    GET_ASI_DTWINX,  /* twin-doubleword (ldda/stda style) access */
    GET_ASI_BLOCK,   /* block load/store ASIs */
    GET_ASI_SHORT,   /* 8/16-bit "short" FP load/store ASIs */
    GET_ASI_BCOPY,   /* sparc32 block copy via sta (ASI_M_BCOPY) */
    GET_ASI_BFILL,   /* sparc32 block fill via stda (ASI_M_BFILL) */
} ASIType;
1922 
/* Result of get_asi(): everything needed to expand one ASI access.  */
typedef struct {
    ASIType type;  /* dispatch style, see ASIType */
    int asi;       /* resolved ASI number */
    int mem_idx;   /* MMU index to use for the access */
    MemOp memop;   /* size/sign, with endianness possibly swapped */
} DisasASI;
1929 
/*
 * Decode the ASI of a memory instruction: the immediate ASI field of
 * INSN or, on v9 when the i bit is set, the value previously copied
 * from %asi into dc->asi.  Returns the access style, the resolved ASI
 * number, the MMU index to use, and MEMOP with endianness adjusted.
 * Illegal or insufficiently privileged ASIs raise an exception and
 * return GET_ASI_EXCP.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: pick the MMU index implied by the ASI.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: pick the access style, and for the short FP
           ASIs also override the access size.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2142 
/*
 * Emit code for an integer load via an alternate address space (LDA et al).
 * The ASI is decoded by get_asi(); direct-mapped ASIs become an inline
 * guest load, anything else falls back to the out-of-line ld_asi helper.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may fault; make pc/npc visible to it first. */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to the 32-bit target. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2177 
/*
 * Emit code for an integer store via an alternate address space (STA et al).
 * Mirrors gen_ld_asi: direct ASIs become an inline guest store, anything
 * else goes through the st_asi helper (which then ends the TB, since a
 * store to an MMU/TLB register may change the page mappings).
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may fault; make pc/npc visible to it first. */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen the 32-bit source. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2253 
2254 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2255                          TCGv addr, int insn)
2256 {
2257     DisasASI da = get_asi(dc, insn, MO_TEUL);
2258 
2259     switch (da.type) {
2260     case GET_ASI_EXCP:
2261         break;
2262     case GET_ASI_DIRECT:
2263         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2264         break;
2265     default:
2266         /* ??? Should be DAE_invalid_asi.  */
2267         gen_exception(dc, TT_DATA_ACCESS);
2268         break;
2269     }
2270 }
2271 
2272 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2273                         int insn, int rd)
2274 {
2275     DisasASI da = get_asi(dc, insn, MO_TEUL);
2276     TCGv oldv;
2277 
2278     switch (da.type) {
2279     case GET_ASI_EXCP:
2280         return;
2281     case GET_ASI_DIRECT:
2282         oldv = tcg_temp_new();
2283         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2284                                   da.mem_idx, da.memop | MO_ALIGN);
2285         gen_store_gpr(dc, rd, oldv);
2286         break;
2287     default:
2288         /* ??? Should be DAE_invalid_asi.  */
2289         gen_exception(dc, TT_DATA_ACCESS);
2290         break;
2291     }
2292 }
2293 
/*
 * Emit code for LDSTUBA: atomically load the byte at ADDR (alternate
 * address space) into DST and store 0xff back.  Direct ASIs use the
 * inline gen_ldstub; other ASIs are emulated non-atomically via the
 * ld_asi/st_asi helpers (only legal outside parallel mode).
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* Can't do a non-atomic load+store while others may run. */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* The helpers may fault; make pc/npc visible first. */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2329 #endif
2330 
2331 #ifdef TARGET_SPARC64
2332 static void gen_ldf_asi(DisasContext *dc, TCGv addr,
2333                         int insn, int size, int rd)
2334 {
2335     DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
2336     TCGv_i32 d32;
2337     TCGv_i64 d64;
2338 
2339     switch (da.type) {
2340     case GET_ASI_EXCP:
2341         break;
2342 
2343     case GET_ASI_DIRECT:
2344         gen_address_mask(dc, addr);
2345         switch (size) {
2346         case 4:
2347             d32 = gen_dest_fpr_F(dc);
2348             tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
2349             gen_store_fpr_F(dc, rd, d32);
2350             break;
2351         case 8:
2352             tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
2353                                 da.memop | MO_ALIGN_4);
2354             break;
2355         case 16:
2356             d64 = tcg_temp_new_i64();
2357             tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
2358             tcg_gen_addi_tl(addr, addr, 8);
2359             tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
2360                                 da.memop | MO_ALIGN_4);
2361             tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
2362             break;
2363         default:
2364             g_assert_not_reached();
2365         }
2366         break;
2367 
2368     case GET_ASI_BLOCK:
2369         /* Valid for lddfa on aligned registers only.  */
2370         if (size == 8 && (rd & 7) == 0) {
2371             MemOp memop;
2372             TCGv eight;
2373             int i;
2374 
2375             gen_address_mask(dc, addr);
2376 
2377             /* The first operation checks required alignment.  */
2378             memop = da.memop | MO_ALIGN_64;
2379             eight = tcg_constant_tl(8);
2380             for (i = 0; ; ++i) {
2381                 tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
2382                                     da.mem_idx, memop);
2383                 if (i == 7) {
2384                     break;
2385                 }
2386                 tcg_gen_add_tl(addr, addr, eight);
2387                 memop = da.memop;
2388             }
2389         } else {
2390             gen_exception(dc, TT_ILL_INSN);
2391         }
2392         break;
2393 
2394     case GET_ASI_SHORT:
2395         /* Valid for lddfa only.  */
2396         if (size == 8) {
2397             gen_address_mask(dc, addr);
2398             tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
2399                                 da.memop | MO_ALIGN);
2400         } else {
2401             gen_exception(dc, TT_ILL_INSN);
2402         }
2403         break;
2404 
2405     default:
2406         {
2407             TCGv_i32 r_asi = tcg_constant_i32(da.asi);
2408             TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);
2409 
2410             save_state(dc);
2411             /* According to the table in the UA2011 manual, the only
2412                other asis that are valid for ldfa/lddfa/ldqfa are
2413                the NO_FAULT asis.  We still need a helper for these,
2414                but we can just use the integer asi helper for them.  */
2415             switch (size) {
2416             case 4:
2417                 d64 = tcg_temp_new_i64();
2418                 gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
2419                 d32 = gen_dest_fpr_F(dc);
2420                 tcg_gen_extrl_i64_i32(d32, d64);
2421                 gen_store_fpr_F(dc, rd, d32);
2422                 break;
2423             case 8:
2424                 gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
2425                 break;
2426             case 16:
2427                 d64 = tcg_temp_new_i64();
2428                 gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
2429                 tcg_gen_addi_tl(addr, addr, 8);
2430                 gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
2431                 tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
2432                 break;
2433             default:
2434                 g_assert_not_reached();
2435             }
2436         }
2437         break;
2438     }
2439 }
2440 
/*
 * Emit code for a floating-point store with ASI (stfa/stdfa/stqfa).
 * SIZE is the access width in bytes: 4 (single), 8 (double) or
 * 16 (quad).  RD is the FP source register number.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            /* Doubles require only 4-byte alignment on sparc. */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* 64-byte block store: eight consecutive doubles. */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2523 
/*
 * Emit code for the 64-bit LDDA: load a doubleword from ADDR in the
 * decoded address space into the even/odd register pair RD, RD+1.
 * TWINX ASIs load two full 64-bit words instead.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        return;

    case GET_ASI_DTWINX:
        /* Two 64-bit loads; the 16-byte alignment check is on the first. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            /* The helper may fault; make pc/npc visible first. */
            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2585 
/*
 * Emit code for the 64-bit STDA: store the even/odd register pair
 * RD (HI), RD+1 to ADDR in the decoded address space.  TWINX ASIs
 * store two full 64-bit words instead.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        break;

    case GET_ASI_DTWINX:
        /* Two 64-bit stores; the 16-byte alignment check is on the first. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            /* The helper may fault; make pc/npc visible first. */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2641 
2642 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2643                          int insn, int rd)
2644 {
2645     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2646     TCGv oldv;
2647 
2648     switch (da.type) {
2649     case GET_ASI_EXCP:
2650         return;
2651     case GET_ASI_DIRECT:
2652         oldv = tcg_temp_new();
2653         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2654                                   da.mem_idx, da.memop | MO_ALIGN);
2655         gen_store_gpr(dc, rd, oldv);
2656         break;
2657     default:
2658         /* ??? Should be DAE_invalid_asi.  */
2659         gen_exception(dc, TT_DATA_ACCESS);
2660         break;
2661     }
2662 }
2663 
2664 #elif !defined(CONFIG_USER_ONLY)
/*
 * Emit code for the 32-bit LDDA: load a 64-bit doubleword from ADDR
 * in the decoded address space and split it into the even/odd
 * register pair RD (high word), RD+1 (low word).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may fault; make pc/npc visible first. */
            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit value into the two 32-bit destination registers. */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2698 
/*
 * Emit code for the 32-bit STDA: concatenate the even/odd register
 * pair RD (HI), RD+1 into one 64-bit value and store it at ADDR in
 * the decoded address space.  Also implements the BFILL pseudo-ASI.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi() already raised an exception; emit nothing. */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may fault; make pc/npc visible first. */
            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2744 #endif
2745 
2746 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2747 {
2748     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2749     return gen_load_gpr(dc, rs1);
2750 }
2751 
2752 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2753 {
2754     if (IS_IMM) { /* immediate */
2755         target_long simm = GET_FIELDs(insn, 19, 31);
2756         TCGv t = tcg_temp_new();
2757         tcg_gen_movi_tl(t, simm);
2758         return t;
2759     } else {      /* register */
2760         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2761         return gen_load_gpr(dc, rs2);
2762     }
2763 }
2764 
2765 #ifdef TARGET_SPARC64
/* Conditional move of a single-precision FP register (FMOVScc etc.):
   if CMP holds, copy %f[rs] into %f[rd], else leave %f[rd] unchanged. */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* c1 already holds 0/1; just narrow it. */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Materialize the comparison as 0/1, then narrow. */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_constant_i32(0);

    /* dst = (c32 != 0) ? s1 : s2 */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2791 
2792 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2793 {
2794     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2795     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2796                         gen_load_fpr_D(dc, rs),
2797                         gen_load_fpr_D(dc, rd));
2798     gen_store_fpr_D(dc, rd, dst);
2799 }
2800 
2801 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2802 {
2803     int qd = QFPREG(rd);
2804     int qs = QFPREG(rs);
2805 
2806     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2807                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2808     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2809                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2810 
2811     gen_update_fprs_dirty(dc, qd);
2812 }
2813 
2814 #ifndef CONFIG_USER_ONLY
/* Compute a host pointer to the current trap-state entry:
   r_tsptr = &env->ts[env->tl & MAXTL_MASK]. */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding. */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2836 #endif
2837 
2838 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
2839                      int width, bool cc, bool left)
2840 {
2841     TCGv lo1, lo2;
2842     uint64_t amask, tabl, tabr;
2843     int shift, imask, omask;
2844 
2845     if (cc) {
2846         tcg_gen_mov_tl(cpu_cc_src, s1);
2847         tcg_gen_mov_tl(cpu_cc_src2, s2);
2848         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
2849         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
2850         dc->cc_op = CC_OP_SUB;
2851     }
2852 
2853     /* Theory of operation: there are two tables, left and right (not to
2854        be confused with the left and right versions of the opcode).  These
2855        are indexed by the low 3 bits of the inputs.  To make things "easy",
2856        these tables are loaded into two constants, TABL and TABR below.
2857        The operation index = (input & imask) << shift calculates the index
2858        into the constant, while val = (table >> index) & omask calculates
2859        the value we're looking for.  */
2860     switch (width) {
2861     case 8:
2862         imask = 0x7;
2863         shift = 3;
2864         omask = 0xff;
2865         if (left) {
2866             tabl = 0x80c0e0f0f8fcfeffULL;
2867             tabr = 0xff7f3f1f0f070301ULL;
2868         } else {
2869             tabl = 0x0103070f1f3f7fffULL;
2870             tabr = 0xfffefcf8f0e0c080ULL;
2871         }
2872         break;
2873     case 16:
2874         imask = 0x6;
2875         shift = 1;
2876         omask = 0xf;
2877         if (left) {
2878             tabl = 0x8cef;
2879             tabr = 0xf731;
2880         } else {
2881             tabl = 0x137f;
2882             tabr = 0xfec8;
2883         }
2884         break;
2885     case 32:
2886         imask = 0x4;
2887         shift = 0;
2888         omask = 0x3;
2889         if (left) {
2890             tabl = (2 << 2) | 3;
2891             tabr = (3 << 2) | 1;
2892         } else {
2893             tabl = (1 << 2) | 3;
2894             tabr = (3 << 2) | 2;
2895         }
2896         break;
2897     default:
2898         abort();
2899     }
2900 
2901     lo1 = tcg_temp_new();
2902     lo2 = tcg_temp_new();
2903     tcg_gen_andi_tl(lo1, s1, imask);
2904     tcg_gen_andi_tl(lo2, s2, imask);
2905     tcg_gen_shli_tl(lo1, lo1, shift);
2906     tcg_gen_shli_tl(lo2, lo2, shift);
2907 
2908     tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
2909     tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
2910     tcg_gen_andi_tl(dst, lo1, omask);
2911     tcg_gen_andi_tl(lo2, lo2, omask);
2912 
2913     amask = -8;
2914     if (AM_CHECK(dc)) {
2915         amask &= 0xffffffffULL;
2916     }
2917     tcg_gen_andi_tl(s1, s1, amask);
2918     tcg_gen_andi_tl(s2, s2, amask);
2919 
2920     /* We want to compute
2921         dst = (s1 == s2 ? lo1 : lo1 & lo2).
2922        We've already done dst = lo1, so this reduces to
2923         dst &= (s1 == s2 ? -1 : lo2)
2924        Which we perform by
2925         lo2 |= -(s1 == s2)
2926         dst &= lo2
2927     */
2928     tcg_gen_setcond_tl(TCG_COND_EQ, lo1, s1, s2);
2929     tcg_gen_neg_tl(lo1, lo1);
2930     tcg_gen_or_tl(lo2, lo2, lo1);
2931     tcg_gen_and_tl(dst, dst, lo2);
2932 }
2933 
2934 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2935 {
2936     TCGv tmp = tcg_temp_new();
2937 
2938     tcg_gen_add_tl(tmp, s1, s2);
2939     tcg_gen_andi_tl(dst, tmp, -8);
2940     if (left) {
2941         tcg_gen_neg_tl(tmp, tmp);
2942     }
2943     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2944 }
2945 
/*
 * Emit code for the VIS FALIGNDATA instruction: concatenate S1:S2 as a
 * 128-bit value and extract the 64 bits starting GSR.align bytes in,
 * i.e. dst = (s1 << (align*8)) | (s2 >> (64 - align*8)).
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8 (bit count), in [0, 56]. */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2966 #endif
2967 
/* Decoder guards: bail out of disas_sparc_insn() when the CPU model
   lacks the named integer-unit or FPU feature.  The jump targets
   (illegal_insn / nfpu_insn) live inside disas_sparc_insn(). */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2974 
2975 /* before an instruction, dc->pc must be static */
2976 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2977 {
2978     unsigned int opc, rs1, rs2, rd;
2979     TCGv cpu_src1, cpu_src2;
2980     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2981     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2982     target_long simm;
2983 
2984     opc = GET_FIELD(insn, 0, 1);
2985     rd = GET_FIELD(insn, 2, 6);
2986 
2987     switch (opc) {
2988     case 0:                     /* branches/sethi */
2989         {
2990             unsigned int xop = GET_FIELD(insn, 7, 9);
2991             int32_t target;
2992             switch (xop) {
2993 #ifdef TARGET_SPARC64
2994             case 0x1:           /* V9 BPcc */
2995                 {
2996                     int cc;
2997 
2998                     target = GET_FIELD_SP(insn, 0, 18);
2999                     target = sign_extend(target, 19);
3000                     target <<= 2;
3001                     cc = GET_FIELD_SP(insn, 20, 21);
3002                     if (cc == 0)
3003                         do_branch(dc, target, insn, 0);
3004                     else if (cc == 2)
3005                         do_branch(dc, target, insn, 1);
3006                     else
3007                         goto illegal_insn;
3008                     goto jmp_insn;
3009                 }
3010             case 0x3:           /* V9 BPr */
3011                 {
3012                     target = GET_FIELD_SP(insn, 0, 13) |
3013                         (GET_FIELD_SP(insn, 20, 21) << 14);
3014                     target = sign_extend(target, 16);
3015                     target <<= 2;
3016                     cpu_src1 = get_src1(dc, insn);
3017                     do_branch_reg(dc, target, insn, cpu_src1);
3018                     goto jmp_insn;
3019                 }
3020             case 0x5:           /* V9 FBPcc */
3021                 {
3022                     int cc = GET_FIELD_SP(insn, 20, 21);
3023                     if (gen_trap_ifnofpu(dc)) {
3024                         goto jmp_insn;
3025                     }
3026                     target = GET_FIELD_SP(insn, 0, 18);
3027                     target = sign_extend(target, 19);
3028                     target <<= 2;
3029                     do_fbranch(dc, target, insn, cc);
3030                     goto jmp_insn;
3031                 }
3032 #else
3033             case 0x7:           /* CBN+x */
3034                 {
3035                     goto ncp_insn;
3036                 }
3037 #endif
3038             case 0x2:           /* BN+x */
3039                 {
3040                     target = GET_FIELD(insn, 10, 31);
3041                     target = sign_extend(target, 22);
3042                     target <<= 2;
3043                     do_branch(dc, target, insn, 0);
3044                     goto jmp_insn;
3045                 }
3046             case 0x6:           /* FBN+x */
3047                 {
3048                     if (gen_trap_ifnofpu(dc)) {
3049                         goto jmp_insn;
3050                     }
3051                     target = GET_FIELD(insn, 10, 31);
3052                     target = sign_extend(target, 22);
3053                     target <<= 2;
3054                     do_fbranch(dc, target, insn, 0);
3055                     goto jmp_insn;
3056                 }
3057             case 0x4:           /* SETHI */
3058                 /* Special-case %g0 because that's the canonical nop.  */
3059                 if (rd) {
3060                     uint32_t value = GET_FIELD(insn, 10, 31);
3061                     TCGv t = gen_dest_gpr(dc, rd);
3062                     tcg_gen_movi_tl(t, value << 10);
3063                     gen_store_gpr(dc, rd, t);
3064                 }
3065                 break;
3066             case 0x0:           /* UNIMPL */
3067             default:
3068                 goto illegal_insn;
3069             }
3070             break;
3071         }
3072         break;
3073     case 1:                     /*CALL*/
3074         {
3075             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3076             TCGv o7 = gen_dest_gpr(dc, 15);
3077 
3078             tcg_gen_movi_tl(o7, dc->pc);
3079             gen_store_gpr(dc, 15, o7);
3080             target += dc->pc;
3081             gen_mov_pc_npc(dc);
3082 #ifdef TARGET_SPARC64
3083             if (unlikely(AM_CHECK(dc))) {
3084                 target &= 0xffffffffULL;
3085             }
3086 #endif
3087             dc->npc = target;
3088         }
3089         goto jmp_insn;
3090     case 2:                     /* FPU & Logical Operations */
3091         {
3092             unsigned int xop = GET_FIELD(insn, 7, 12);
3093             TCGv cpu_dst = tcg_temp_new();
3094             TCGv cpu_tmp0;
3095 
3096             if (xop == 0x3a) {  /* generate trap */
3097                 int cond = GET_FIELD(insn, 3, 6);
3098                 TCGv_i32 trap;
3099                 TCGLabel *l1 = NULL;
3100                 int mask;
3101 
3102                 if (cond == 0) {
3103                     /* Trap never.  */
3104                     break;
3105                 }
3106 
3107                 save_state(dc);
3108 
3109                 if (cond != 8) {
3110                     /* Conditional trap.  */
3111                     DisasCompare cmp;
3112 #ifdef TARGET_SPARC64
3113                     /* V9 icc/xcc */
3114                     int cc = GET_FIELD_SP(insn, 11, 12);
3115                     if (cc == 0) {
3116                         gen_compare(&cmp, 0, cond, dc);
3117                     } else if (cc == 2) {
3118                         gen_compare(&cmp, 1, cond, dc);
3119                     } else {
3120                         goto illegal_insn;
3121                     }
3122 #else
3123                     gen_compare(&cmp, 0, cond, dc);
3124 #endif
3125                     l1 = gen_new_label();
3126                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3127                                       cmp.c1, cmp.c2, l1);
3128                 }
3129 
3130                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3131                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3132 
3133                 /* Don't use the normal temporaries, as they may well have
3134                    gone out of scope with the branch above.  While we're
3135                    doing that we might as well pre-truncate to 32-bit.  */
3136                 trap = tcg_temp_new_i32();
3137 
3138                 rs1 = GET_FIELD_SP(insn, 14, 18);
3139                 if (IS_IMM) {
3140                     rs2 = GET_FIELD_SP(insn, 0, 7);
3141                     if (rs1 == 0) {
3142                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3143                         /* Signal that the trap value is fully constant.  */
3144                         mask = 0;
3145                     } else {
3146                         TCGv t1 = gen_load_gpr(dc, rs1);
3147                         tcg_gen_trunc_tl_i32(trap, t1);
3148                         tcg_gen_addi_i32(trap, trap, rs2);
3149                     }
3150                 } else {
3151                     TCGv t1, t2;
3152                     rs2 = GET_FIELD_SP(insn, 0, 4);
3153                     t1 = gen_load_gpr(dc, rs1);
3154                     t2 = gen_load_gpr(dc, rs2);
3155                     tcg_gen_add_tl(t1, t1, t2);
3156                     tcg_gen_trunc_tl_i32(trap, t1);
3157                 }
3158                 if (mask != 0) {
3159                     tcg_gen_andi_i32(trap, trap, mask);
3160                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3161                 }
3162 
3163                 gen_helper_raise_exception(cpu_env, trap);
3164 
3165                 if (cond == 8) {
3166                     /* An unconditional trap ends the TB.  */
3167                     dc->base.is_jmp = DISAS_NORETURN;
3168                     goto jmp_insn;
3169                 } else {
3170                     /* A conditional trap falls through to the next insn.  */
3171                     gen_set_label(l1);
3172                     break;
3173                 }
3174             } else if (xop == 0x28) {
3175                 rs1 = GET_FIELD(insn, 13, 17);
3176                 switch(rs1) {
3177                 case 0: /* rdy */
3178 #ifndef TARGET_SPARC64
3179                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3180                                        manual, rdy on the microSPARC
3181                                        II */
3182                 case 0x0f:          /* stbar in the SPARCv8 manual,
3183                                        rdy on the microSPARC II */
3184                 case 0x10 ... 0x1f: /* implementation-dependent in the
3185                                        SPARCv8 manual, rdy on the
3186                                        microSPARC II */
3187                     /* Read Asr17 */
3188                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3189                         TCGv t = gen_dest_gpr(dc, rd);
3190                         /* Read Asr17 for a Leon3 monoprocessor */
3191                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3192                         gen_store_gpr(dc, rd, t);
3193                         break;
3194                     }
3195 #endif
3196                     gen_store_gpr(dc, rd, cpu_y);
3197                     break;
3198 #ifdef TARGET_SPARC64
3199                 case 0x2: /* V9 rdccr */
3200                     update_psr(dc);
3201                     gen_helper_rdccr(cpu_dst, cpu_env);
3202                     gen_store_gpr(dc, rd, cpu_dst);
3203                     break;
3204                 case 0x3: /* V9 rdasi */
3205                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3206                     gen_store_gpr(dc, rd, cpu_dst);
3207                     break;
3208                 case 0x4: /* V9 rdtick */
3209                     {
3210                         TCGv_ptr r_tickptr;
3211                         TCGv_i32 r_const;
3212 
3213                         r_tickptr = tcg_temp_new_ptr();
3214                         r_const = tcg_constant_i32(dc->mem_idx);
3215                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3216                                        offsetof(CPUSPARCState, tick));
3217                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3218                             gen_io_start();
3219                         }
3220                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3221                                                   r_const);
3222                         gen_store_gpr(dc, rd, cpu_dst);
3223                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3224                             /* I/O operations in icount mode must end the TB */
3225                             dc->base.is_jmp = DISAS_EXIT;
3226                         }
3227                     }
3228                     break;
3229                 case 0x5: /* V9 rdpc */
3230                     {
3231                         TCGv t = gen_dest_gpr(dc, rd);
3232                         if (unlikely(AM_CHECK(dc))) {
3233                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3234                         } else {
3235                             tcg_gen_movi_tl(t, dc->pc);
3236                         }
3237                         gen_store_gpr(dc, rd, t);
3238                     }
3239                     break;
3240                 case 0x6: /* V9 rdfprs */
3241                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3242                     gen_store_gpr(dc, rd, cpu_dst);
3243                     break;
3244                 case 0xf: /* V9 membar */
3245                     break; /* no effect */
3246                 case 0x13: /* Graphics Status */
3247                     if (gen_trap_ifnofpu(dc)) {
3248                         goto jmp_insn;
3249                     }
3250                     gen_store_gpr(dc, rd, cpu_gsr);
3251                     break;
3252                 case 0x16: /* Softint */
3253                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3254                                      offsetof(CPUSPARCState, softint));
3255                     gen_store_gpr(dc, rd, cpu_dst);
3256                     break;
3257                 case 0x17: /* Tick compare */
3258                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3259                     break;
3260                 case 0x18: /* System tick */
3261                     {
3262                         TCGv_ptr r_tickptr;
3263                         TCGv_i32 r_const;
3264 
3265                         r_tickptr = tcg_temp_new_ptr();
3266                         r_const = tcg_constant_i32(dc->mem_idx);
3267                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3268                                        offsetof(CPUSPARCState, stick));
3269                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3270                             gen_io_start();
3271                         }
3272                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3273                                                   r_const);
3274                         gen_store_gpr(dc, rd, cpu_dst);
3275                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3276                             /* I/O operations in icount mode must end the TB */
3277                             dc->base.is_jmp = DISAS_EXIT;
3278                         }
3279                     }
3280                     break;
3281                 case 0x19: /* System tick compare */
3282                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3283                     break;
3284                 case 0x1a: /* UltraSPARC-T1 Strand status */
3285                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3286                      * this ASR as impl. dep
3287                      */
3288                     CHECK_IU_FEATURE(dc, HYPV);
3289                     {
3290                         TCGv t = gen_dest_gpr(dc, rd);
3291                         tcg_gen_movi_tl(t, 1UL);
3292                         gen_store_gpr(dc, rd, t);
3293                     }
3294                     break;
3295                 case 0x10: /* Performance Control */
3296                 case 0x11: /* Performance Instrumentation Counter */
3297                 case 0x12: /* Dispatch Control */
3298                 case 0x14: /* Softint set, WO */
3299                 case 0x15: /* Softint clear, WO */
3300 #endif
3301                 default:
3302                     goto illegal_insn;
3303                 }
3304 #if !defined(CONFIG_USER_ONLY)
3305             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3306 #ifndef TARGET_SPARC64
3307                 if (!supervisor(dc)) {
3308                     goto priv_insn;
3309                 }
3310                 update_psr(dc);
3311                 gen_helper_rdpsr(cpu_dst, cpu_env);
3312 #else
3313                 CHECK_IU_FEATURE(dc, HYPV);
3314                 if (!hypervisor(dc))
3315                     goto priv_insn;
3316                 rs1 = GET_FIELD(insn, 13, 17);
3317                 switch (rs1) {
3318                 case 0: // hpstate
3319                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3320                                    offsetof(CPUSPARCState, hpstate));
3321                     break;
3322                 case 1: // htstate
3323                     // gen_op_rdhtstate();
3324                     break;
3325                 case 3: // hintp
3326                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3327                     break;
3328                 case 5: // htba
3329                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3330                     break;
3331                 case 6: // hver
3332                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3333                     break;
3334                 case 31: // hstick_cmpr
3335                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3336                     break;
3337                 default:
3338                     goto illegal_insn;
3339                 }
3340 #endif
3341                 gen_store_gpr(dc, rd, cpu_dst);
3342                 break;
3343             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3344                 if (!supervisor(dc)) {
3345                     goto priv_insn;
3346                 }
3347                 cpu_tmp0 = tcg_temp_new();
3348 #ifdef TARGET_SPARC64
3349                 rs1 = GET_FIELD(insn, 13, 17);
3350                 switch (rs1) {
3351                 case 0: // tpc
3352                     {
3353                         TCGv_ptr r_tsptr;
3354 
3355                         r_tsptr = tcg_temp_new_ptr();
3356                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3357                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3358                                       offsetof(trap_state, tpc));
3359                     }
3360                     break;
3361                 case 1: // tnpc
3362                     {
3363                         TCGv_ptr r_tsptr;
3364 
3365                         r_tsptr = tcg_temp_new_ptr();
3366                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3367                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3368                                       offsetof(trap_state, tnpc));
3369                     }
3370                     break;
3371                 case 2: // tstate
3372                     {
3373                         TCGv_ptr r_tsptr;
3374 
3375                         r_tsptr = tcg_temp_new_ptr();
3376                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3377                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3378                                       offsetof(trap_state, tstate));
3379                     }
3380                     break;
3381                 case 3: // tt
3382                     {
3383                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3384 
3385                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3386                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3387                                          offsetof(trap_state, tt));
3388                     }
3389                     break;
3390                 case 4: // tick
3391                     {
3392                         TCGv_ptr r_tickptr;
3393                         TCGv_i32 r_const;
3394 
3395                         r_tickptr = tcg_temp_new_ptr();
3396                         r_const = tcg_constant_i32(dc->mem_idx);
3397                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3398                                        offsetof(CPUSPARCState, tick));
3399                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3400                             gen_io_start();
3401                         }
3402                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3403                                                   r_tickptr, r_const);
3404                         if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
3405                             /* I/O operations in icount mode must end the TB */
3406                             dc->base.is_jmp = DISAS_EXIT;
3407                         }
3408                     }
3409                     break;
3410                 case 5: // tba
3411                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3412                     break;
3413                 case 6: // pstate
3414                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3415                                      offsetof(CPUSPARCState, pstate));
3416                     break;
3417                 case 7: // tl
3418                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3419                                      offsetof(CPUSPARCState, tl));
3420                     break;
3421                 case 8: // pil
3422                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3423                                      offsetof(CPUSPARCState, psrpil));
3424                     break;
3425                 case 9: // cwp
3426                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3427                     break;
3428                 case 10: // cansave
3429                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3430                                      offsetof(CPUSPARCState, cansave));
3431                     break;
3432                 case 11: // canrestore
3433                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3434                                      offsetof(CPUSPARCState, canrestore));
3435                     break;
3436                 case 12: // cleanwin
3437                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3438                                      offsetof(CPUSPARCState, cleanwin));
3439                     break;
3440                 case 13: // otherwin
3441                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3442                                      offsetof(CPUSPARCState, otherwin));
3443                     break;
3444                 case 14: // wstate
3445                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3446                                      offsetof(CPUSPARCState, wstate));
3447                     break;
3448                 case 16: // UA2005 gl
3449                     CHECK_IU_FEATURE(dc, GL);
3450                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3451                                      offsetof(CPUSPARCState, gl));
3452                     break;
3453                 case 26: // UA2005 strand status
3454                     CHECK_IU_FEATURE(dc, HYPV);
3455                     if (!hypervisor(dc))
3456                         goto priv_insn;
3457                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3458                     break;
3459                 case 31: // ver
3460                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3461                     break;
3462                 case 15: // fq
3463                 default:
3464                     goto illegal_insn;
3465                 }
3466 #else
3467                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3468 #endif
3469                 gen_store_gpr(dc, rd, cpu_tmp0);
3470                 break;
3471 #endif
3472 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3473             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3474 #ifdef TARGET_SPARC64
3475                 gen_helper_flushw(cpu_env);
3476 #else
3477                 if (!supervisor(dc))
3478                     goto priv_insn;
3479                 gen_store_gpr(dc, rd, cpu_tbr);
3480 #endif
3481                 break;
3482 #endif
3483             } else if (xop == 0x34) {   /* FPU Operations */
3484                 if (gen_trap_ifnofpu(dc)) {
3485                     goto jmp_insn;
3486                 }
3487                 gen_op_clear_ieee_excp_and_FTT();
3488                 rs1 = GET_FIELD(insn, 13, 17);
3489                 rs2 = GET_FIELD(insn, 27, 31);
3490                 xop = GET_FIELD(insn, 18, 26);
3491 
3492                 switch (xop) {
3493                 case 0x1: /* fmovs */
3494                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3495                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3496                     break;
3497                 case 0x5: /* fnegs */
3498                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3499                     break;
3500                 case 0x9: /* fabss */
3501                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3502                     break;
3503                 case 0x29: /* fsqrts */
3504                     CHECK_FPU_FEATURE(dc, FSQRT);
3505                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3506                     break;
3507                 case 0x2a: /* fsqrtd */
3508                     CHECK_FPU_FEATURE(dc, FSQRT);
3509                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3510                     break;
3511                 case 0x2b: /* fsqrtq */
3512                     CHECK_FPU_FEATURE(dc, FLOAT128);
3513                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3514                     break;
3515                 case 0x41: /* fadds */
3516                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3517                     break;
3518                 case 0x42: /* faddd */
3519                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3520                     break;
3521                 case 0x43: /* faddq */
3522                     CHECK_FPU_FEATURE(dc, FLOAT128);
3523                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3524                     break;
3525                 case 0x45: /* fsubs */
3526                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3527                     break;
3528                 case 0x46: /* fsubd */
3529                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3530                     break;
3531                 case 0x47: /* fsubq */
3532                     CHECK_FPU_FEATURE(dc, FLOAT128);
3533                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3534                     break;
3535                 case 0x49: /* fmuls */
3536                     CHECK_FPU_FEATURE(dc, FMUL);
3537                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3538                     break;
3539                 case 0x4a: /* fmuld */
3540                     CHECK_FPU_FEATURE(dc, FMUL);
3541                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3542                     break;
3543                 case 0x4b: /* fmulq */
3544                     CHECK_FPU_FEATURE(dc, FLOAT128);
3545                     CHECK_FPU_FEATURE(dc, FMUL);
3546                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3547                     break;
3548                 case 0x4d: /* fdivs */
3549                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3550                     break;
3551                 case 0x4e: /* fdivd */
3552                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3553                     break;
3554                 case 0x4f: /* fdivq */
3555                     CHECK_FPU_FEATURE(dc, FLOAT128);
3556                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3557                     break;
3558                 case 0x69: /* fsmuld */
3559                     CHECK_FPU_FEATURE(dc, FSMULD);
3560                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3561                     break;
3562                 case 0x6e: /* fdmulq */
3563                     CHECK_FPU_FEATURE(dc, FLOAT128);
3564                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3565                     break;
3566                 case 0xc4: /* fitos */
3567                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3568                     break;
3569                 case 0xc6: /* fdtos */
3570                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3571                     break;
3572                 case 0xc7: /* fqtos */
3573                     CHECK_FPU_FEATURE(dc, FLOAT128);
3574                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3575                     break;
3576                 case 0xc8: /* fitod */
3577                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3578                     break;
3579                 case 0xc9: /* fstod */
3580                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3581                     break;
3582                 case 0xcb: /* fqtod */
3583                     CHECK_FPU_FEATURE(dc, FLOAT128);
3584                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3585                     break;
3586                 case 0xcc: /* fitoq */
3587                     CHECK_FPU_FEATURE(dc, FLOAT128);
3588                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3589                     break;
3590                 case 0xcd: /* fstoq */
3591                     CHECK_FPU_FEATURE(dc, FLOAT128);
3592                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3593                     break;
3594                 case 0xce: /* fdtoq */
3595                     CHECK_FPU_FEATURE(dc, FLOAT128);
3596                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3597                     break;
3598                 case 0xd1: /* fstoi */
3599                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3600                     break;
3601                 case 0xd2: /* fdtoi */
3602                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3603                     break;
3604                 case 0xd3: /* fqtoi */
3605                     CHECK_FPU_FEATURE(dc, FLOAT128);
3606                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3607                     break;
3608 #ifdef TARGET_SPARC64
3609                 case 0x2: /* V9 fmovd */
3610                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3611                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3612                     break;
3613                 case 0x3: /* V9 fmovq */
3614                     CHECK_FPU_FEATURE(dc, FLOAT128);
3615                     gen_move_Q(dc, rd, rs2);
3616                     break;
3617                 case 0x6: /* V9 fnegd */
3618                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3619                     break;
3620                 case 0x7: /* V9 fnegq */
3621                     CHECK_FPU_FEATURE(dc, FLOAT128);
3622                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3623                     break;
3624                 case 0xa: /* V9 fabsd */
3625                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3626                     break;
3627                 case 0xb: /* V9 fabsq */
3628                     CHECK_FPU_FEATURE(dc, FLOAT128);
3629                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3630                     break;
3631                 case 0x81: /* V9 fstox */
3632                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3633                     break;
3634                 case 0x82: /* V9 fdtox */
3635                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3636                     break;
3637                 case 0x83: /* V9 fqtox */
3638                     CHECK_FPU_FEATURE(dc, FLOAT128);
3639                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3640                     break;
3641                 case 0x84: /* V9 fxtos */
3642                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3643                     break;
3644                 case 0x88: /* V9 fxtod */
3645                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3646                     break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
                /* FPop2: V9 conditional FP moves plus the FP compare
                   instructions.  Traps if the FPU is disabled.  */
#ifdef TARGET_SPARC64
                int cond;
#endif
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);

#ifdef TARGET_SPARC64
                /* FMOVR: FP move conditional on the contents of integer
                   register rs1.  The macro expands 'insn', 'dc', 'rd' and
                   'rs2' from the enclosing scope and assigns 'cond' and
                   'cpu_src1' as side effects.  */
#define FMOVR(sz)                                                  \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 10, 12);             \
                    cpu_src1 = get_src1(dc, insn);                 \
                    gen_compare_reg(&cmp, cond, cpu_src1);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                } while (0)

                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
                    FMOVR(s);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    FMOVR(d);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVR(q);
                    break;
                }
#undef FMOVR
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
                    /* FMOVCC, first form: FP move conditional on a
                       floating-point condition-code field %fccN.  */
#define FMOVCC(fcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcompare(&cmp, fcc, cond);                  \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVCC(0, s);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVCC(0, d);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVCC(1, s);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVCC(1, d);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVCC(2, s);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVCC(2, d);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(2, q);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVCC(3, s);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVCC(3, d);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(3, q);
                        break;
#undef FMOVCC
                    /* FMOVCC redefined: now conditional on the integer
                       condition codes (first argument 0 -> %icc,
                       1 -> %xcc, as passed to gen_compare).  */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                    } while (0)

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVCC(0, s);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVCC(0, d);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVCC(1, s);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVCC(1, d);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
#undef FMOVCC
#endif
                    /* FP compares: rd & 3 selects the %fcc field that
                       receives the result.  The 'e' variants differ only
                       in the helper called (exception semantics).  */
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                /* or: %g0 operands are special-cased so the common
                   clr/mov idioms need no TCG arithmetic at all.  */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            } else if (xop == 0x25) { /* sll, V9 sllx */
                /* Insn bit 12 selects the 64-bit form (sllx, 6-bit shift
                   count) vs the 32-bit form (sll, 5-bit count).  */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                /* 32-bit srl shifts the zero-extended low word, hence the
                   0xffffffff mask before the shift.  */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                /* 32-bit sra shifts the sign-extended low word.  */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = tcg_temp_new();
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    /* Bit 4 (0x10) of xop selects the condition-code
                       setting variant of each operation (addcc, andcc,
                       ...), so decode on xop & ~0x10 and test the bit
                       inside each arm.  */
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x1: /* and */
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            /* Logic ops only need the result for lazy
                               flag evaluation.  */
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x5: /* andn */
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            /* NOTE(review): unlike add/sub above, there is
                               no movi to cpu_cc_op here -- presumably the
                               _cc helper updates env->cc_op itself;
                               confirm against the helper.  */
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            /* Same cc_op handling as udiv_cc above.  */
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        /* Trapping variant: the helper handles the tag
                           overflow trap.  NOTE(review): cpu_cc_op is not
                           written here (no movi), presumably the helper
                           maintains env->cc_op -- confirm.  */
                        gen_helper_taddcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        gen_helper_tsubcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        /* mulscc consumes the current icc flags, so force
                           the lazy flags to be materialized first.  */
                        update_psr(dc);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    /* 32-bit only shift forms; the 64-bit decoder handles
                       sll/srl/sra in the xop 0x25..0x27 arms above.  */
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            /* wr %y / wr ASR: rd selects the target
                               register; the written value is always
                               rs1 ^ (rs2 or simm), per the SPARC wr
                               definition (all arms xor cpu_src1 with
                               cpu_src2).  */
                            cpu_tmp0 = tcg_temp_new();
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                if ((rd == 0x13) && (dc->def->features &
                                                     CPU_FEATURE_POWERDOWN)) {
                                    /* LEON3 power-down */
                                    save_state(dc);
                                    gen_helper_power_down(cpu_env);
                                }
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_tmp0);
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, asi));
                                /* End TB to notice changed ASI.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                /* FPRS changed: end the TB so subsequent
                                   code re-checks FP enable state.  */
                                dc->fprs_dirty = 0;
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    /* Writing %stick sets the counter
                                       itself (set_count), not a limit.  */
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 0x19: /* System tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_stick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;

                            case 0x10: /* Performance Control */
                            case 0x11: /* Performance Instrumentation
                                          Counter */
                            case 0x12: /* Dispatch Control */
#endif
                            default:
                                goto illegal_insn;
                            }
                        }
                        break;
4279 #if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* On V9 this opcode encodes SAVED/RESTORED
                               (window-management state ops) selected by rd;
                               there is no PSR to write.  */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(cpu_env);
                                break;
                            case 1:
                                gen_helper_restored(cpu_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /* Pre-V9 wrpsr: value is rs1 ^ rs2/imm per the
                               architectural WR definition.  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            /* The helper rewrites the condition codes, so the
                               cached cc state must be switched to CC_OP_FLAGS
                               both in the generated code and in the disas
                               context.  */
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* PSR writes can change CWP/PIL etc.; flush state
                               and end the TB unconditionally.  */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* cpu_tmp0 = rs1 ^ rs2/imm: the architectural
                               value written by WR/WRPR.  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register.
                               tpc/tnpc/tstate/tt live in the trap_state entry
                               for the current trap level.  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                    /* tt is a 32-bit field, hence st32.  */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* Timer helper is an I/O op under icount.  */
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* PSTATE affects translation assumptions;
                                   flush state first and force npc dynamic.  */
                                save_state(dc);
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_wrpstate(cpu_env, cpu_tmp0);
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    /* I/O ops in icount mode must end the TB */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* Changing PIL can unmask pending interrupts,
                                   so the write is an I/O op under icount.  */
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    gen_io_start();
                                }
                                gen_helper_wrpil(cpu_env, cpu_tmp0);
                                if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
                                    /* I/O ops in icount mode must end the TB */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(cpu_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(cpu_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* Pre-V9 wrwim: mask the written value down to
                               the implemented window bits when fewer than 32
                               register windows exist.  */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* Pre-V9: write the trap base register
                               (value = rs1 ^ rs2/imm).  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hypervisor-privileged register
                               write, selected by rd.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, cpu_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* HPSTATE changes execution mode assumptions:
                                   end the TB immediately.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    /* Timer helper is an I/O op under icount.  */
                                    if (tb_cflags(dc->base.tb) &
                                           CF_USE_ICOUNT) {
                                        gen_io_start();
                                    }
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
4516 #endif
4517 #ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            /* Conditional register move on integer or FP
                               condition codes.  Bit 18 selects icc/xcc vs
                               fcc; cc picks which condition-code set.  */
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            if (insn & (1 << 18)) {
                                /* Integer ccs: only cc=0 (icc) and cc=2 (xcc)
                                   are valid encodings.  */
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* movcond keeps the old rd value when the
                               condition is false, hence the preload.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; helper raises the divide
                           exceptions as needed.  */
                        gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of rs2 (rs1 is unused by popc).  */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            /* Conditional move on the contents of rs1
                               (register-compare-with-zero condition).  */
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* As with movcc: preload rd so the move is a
                               no-op when the condition is false.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4583 #endif
4584                     default:
4585                         goto illegal_insn;
4586                     }
4587                 }
4588             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4589 #ifdef TARGET_SPARC64
4590                 int opf = GET_FIELD_SP(insn, 5, 13);
4591                 rs1 = GET_FIELD(insn, 13, 17);
4592                 rs2 = GET_FIELD(insn, 27, 31);
4593                 if (gen_trap_ifnofpu(dc)) {
4594                     goto jmp_insn;
4595                 }
4596 
4597                 switch (opf) {
                /* VIS (UltraSPARC graphics) subopcodes, dispatched on opf.
                   Each arm checks the required VIS feature level first.  */

                /* edge8/16/32[l][cc|n]: edge-mask generation; the trailing
                   gen_edge() arguments are (element size, cc-update flag,
                   little-endian flag).  */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* array8/16/32: 3D array addressing.  array16/array32 reuse
                   the array8 helper and scale the result by the element
                   size with a left shift.  */
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* rs1 + rs2 goes both to rd and into the mask field
                       (upper 32 bits) of GSR for later bshuffle.  */
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* fcmp{le,ne,gt,eq}{16,32}: partitioned FP-register compares
                   producing a bitmask in an integer register.  */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* Partitioned multiplies and pixel pack; the gen_*_fop_DDD
                   wrappers handle FP-register load/store around the helper.
                   The gen_gsr_* variants additionally pass GSR.  */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* fpack16/fpackfix narrow a 64-bit source to a 32-bit
                       result, using the GSR scale factor.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* pdist accumulates into rd, hence the DDDD (dst is
                       also a source) wrapper.  */
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                /* Partitioned add/subtract; the 32-bit "s" variants map
                   directly onto plain i32 TCG arithmetic.  */
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
4887                 case 0x061: /* VIS I fzeros */
4888                     CHECK_FPU_FEATURE(dc, VIS1);
4889                     cpu_dst_32 = gen_dest_fpr_F(dc);
4890                     tcg_gen_movi_i32(cpu_dst_32, 0);
4891                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4892                     break;
4893                 case 0x062: /* VIS I fnor */
4894                     CHECK_FPU_FEATURE(dc, VIS1);
4895                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4896                     break;
4897                 case 0x063: /* VIS I fnors */
4898                     CHECK_FPU_FEATURE(dc, VIS1);
4899                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4900                     break;
4901                 case 0x064: /* VIS I fandnot2 */
4902                     CHECK_FPU_FEATURE(dc, VIS1);
4903                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4904                     break;
4905                 case 0x065: /* VIS I fandnot2s */
4906                     CHECK_FPU_FEATURE(dc, VIS1);
4907                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4908                     break;
4909                 case 0x066: /* VIS I fnot2 */
4910                     CHECK_FPU_FEATURE(dc, VIS1);
4911                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4912                     break;
4913                 case 0x067: /* VIS I fnot2s */
4914                     CHECK_FPU_FEATURE(dc, VIS1);
4915                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4916                     break;
4917                 case 0x068: /* VIS I fandnot1 */
4918                     CHECK_FPU_FEATURE(dc, VIS1);
4919                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4920                     break;
4921                 case 0x069: /* VIS I fandnot1s */
4922                     CHECK_FPU_FEATURE(dc, VIS1);
4923                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4924                     break;
4925                 case 0x06a: /* VIS I fnot1 */
4926                     CHECK_FPU_FEATURE(dc, VIS1);
4927                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4928                     break;
4929                 case 0x06b: /* VIS I fnot1s */
4930                     CHECK_FPU_FEATURE(dc, VIS1);
4931                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4932                     break;
4933                 case 0x06c: /* VIS I fxor */
4934                     CHECK_FPU_FEATURE(dc, VIS1);
4935                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4936                     break;
4937                 case 0x06d: /* VIS I fxors */
4938                     CHECK_FPU_FEATURE(dc, VIS1);
4939                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4940                     break;
4941                 case 0x06e: /* VIS I fnand */
4942                     CHECK_FPU_FEATURE(dc, VIS1);
4943                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4944                     break;
4945                 case 0x06f: /* VIS I fnands */
4946                     CHECK_FPU_FEATURE(dc, VIS1);
4947                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4948                     break;
4949                 case 0x070: /* VIS I fand */
4950                     CHECK_FPU_FEATURE(dc, VIS1);
4951                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4952                     break;
4953                 case 0x071: /* VIS I fands */
4954                     CHECK_FPU_FEATURE(dc, VIS1);
4955                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4956                     break;
4957                 case 0x072: /* VIS I fxnor */
4958                     CHECK_FPU_FEATURE(dc, VIS1);
4959                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4960                     break;
4961                 case 0x073: /* VIS I fxnors */
4962                     CHECK_FPU_FEATURE(dc, VIS1);
4963                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4964                     break;
4965                 case 0x074: /* VIS I fsrc1 */
4966                     CHECK_FPU_FEATURE(dc, VIS1);
4967                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4968                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4969                     break;
4970                 case 0x075: /* VIS I fsrc1s */
4971                     CHECK_FPU_FEATURE(dc, VIS1);
4972                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4973                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4974                     break;
4975                 case 0x076: /* VIS I fornot2 */
4976                     CHECK_FPU_FEATURE(dc, VIS1);
4977                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4978                     break;
4979                 case 0x077: /* VIS I fornot2s */
4980                     CHECK_FPU_FEATURE(dc, VIS1);
4981                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4982                     break;
4983                 case 0x078: /* VIS I fsrc2 */
4984                     CHECK_FPU_FEATURE(dc, VIS1);
4985                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4986                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4987                     break;
4988                 case 0x079: /* VIS I fsrc2s */
4989                     CHECK_FPU_FEATURE(dc, VIS1);
4990                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4991                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4992                     break;
4993                 case 0x07a: /* VIS I fornot1 */
4994                     CHECK_FPU_FEATURE(dc, VIS1);
4995                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4996                     break;
4997                 case 0x07b: /* VIS I fornot1s */
4998                     CHECK_FPU_FEATURE(dc, VIS1);
4999                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5000                     break;
5001                 case 0x07c: /* VIS I for */
5002                     CHECK_FPU_FEATURE(dc, VIS1);
5003                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5004                     break;
5005                 case 0x07d: /* VIS I fors */
5006                     CHECK_FPU_FEATURE(dc, VIS1);
5007                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5008                     break;
5009                 case 0x07e: /* VIS I fone */
5010                     CHECK_FPU_FEATURE(dc, VIS1);
5011                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5012                     tcg_gen_movi_i64(cpu_dst_64, -1);
5013                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5014                     break;
5015                 case 0x07f: /* VIS I fones */
5016                     CHECK_FPU_FEATURE(dc, VIS1);
5017                     cpu_dst_32 = gen_dest_fpr_F(dc);
5018                     tcg_gen_movi_i32(cpu_dst_32, -1);
5019                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5020                     break;
5021                 case 0x080: /* VIS I shutdown */
5022                 case 0x081: /* VIS II siam */
5023                     // XXX
5024                     goto illegal_insn;
5025                 default:
5026                     goto illegal_insn;
5027                 }
5028 #else
5029                 goto ncp_insn;
5030 #endif
5031             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5032 #ifdef TARGET_SPARC64
5033                 goto illegal_insn;
5034 #else
5035                 goto ncp_insn;
5036 #endif
5037 #ifdef TARGET_SPARC64
5038             } else if (xop == 0x39) { /* V9 return */
5039                 save_state(dc);
5040                 cpu_src1 = get_src1(dc, insn);
5041                 cpu_tmp0 = tcg_temp_new();
5042                 if (IS_IMM) {   /* immediate */
5043                     simm = GET_FIELDs(insn, 19, 31);
5044                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5045                 } else {                /* register */
5046                     rs2 = GET_FIELD(insn, 27, 31);
5047                     if (rs2) {
5048                         cpu_src2 = gen_load_gpr(dc, rs2);
5049                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5050                     } else {
5051                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5052                     }
5053                 }
5054                 gen_helper_restore(cpu_env);
5055                 gen_mov_pc_npc(dc);
5056                 gen_check_align(cpu_tmp0, 3);
5057                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5058                 dc->npc = DYNAMIC_PC;
5059                 goto jmp_insn;
5060 #endif
5061             } else {
5062                 cpu_src1 = get_src1(dc, insn);
5063                 cpu_tmp0 = tcg_temp_new();
5064                 if (IS_IMM) {   /* immediate */
5065                     simm = GET_FIELDs(insn, 19, 31);
5066                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5067                 } else {                /* register */
5068                     rs2 = GET_FIELD(insn, 27, 31);
5069                     if (rs2) {
5070                         cpu_src2 = gen_load_gpr(dc, rs2);
5071                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5072                     } else {
5073                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5074                     }
5075                 }
5076                 switch (xop) {
5077                 case 0x38:      /* jmpl */
5078                     {
5079                         TCGv t = gen_dest_gpr(dc, rd);
5080                         tcg_gen_movi_tl(t, dc->pc);
5081                         gen_store_gpr(dc, rd, t);
5082 
5083                         gen_mov_pc_npc(dc);
5084                         gen_check_align(cpu_tmp0, 3);
5085                         gen_address_mask(dc, cpu_tmp0);
5086                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5087                         dc->npc = DYNAMIC_PC;
5088                     }
5089                     goto jmp_insn;
5090 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5091                 case 0x39:      /* rett, V9 return */
5092                     {
5093                         if (!supervisor(dc))
5094                             goto priv_insn;
5095                         gen_mov_pc_npc(dc);
5096                         gen_check_align(cpu_tmp0, 3);
5097                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5098                         dc->npc = DYNAMIC_PC;
5099                         gen_helper_rett(cpu_env);
5100                     }
5101                     goto jmp_insn;
5102 #endif
5103                 case 0x3b: /* flush */
5104                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5105                         goto unimp_flush;
5106                     /* nop */
5107                     break;
5108                 case 0x3c:      /* save */
5109                     gen_helper_save(cpu_env);
5110                     gen_store_gpr(dc, rd, cpu_tmp0);
5111                     break;
5112                 case 0x3d:      /* restore */
5113                     gen_helper_restore(cpu_env);
5114                     gen_store_gpr(dc, rd, cpu_tmp0);
5115                     break;
5116 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5117                 case 0x3e:      /* V9 done/retry */
5118                     {
5119                         switch (rd) {
5120                         case 0:
5121                             if (!supervisor(dc))
5122                                 goto priv_insn;
5123                             dc->npc = DYNAMIC_PC;
5124                             dc->pc = DYNAMIC_PC;
5125                             if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5126                                 gen_io_start();
5127                             }
5128                             gen_helper_done(cpu_env);
5129                             goto jmp_insn;
5130                         case 1:
5131                             if (!supervisor(dc))
5132                                 goto priv_insn;
5133                             dc->npc = DYNAMIC_PC;
5134                             dc->pc = DYNAMIC_PC;
5135                             if (tb_cflags(dc->base.tb) & CF_USE_ICOUNT) {
5136                                 gen_io_start();
5137                             }
5138                             gen_helper_retry(cpu_env);
5139                             goto jmp_insn;
5140                         default:
5141                             goto illegal_insn;
5142                         }
5143                     }
5144                     break;
5145 #endif
5146                 default:
5147                     goto illegal_insn;
5148                 }
5149             }
5150             break;
5151         }
5152         break;
5153     case 3:                     /* load/store instructions */
5154         {
5155             unsigned int xop = GET_FIELD(insn, 7, 12);
5156             /* ??? gen_address_mask prevents us from using a source
5157                register directly.  Always generate a temporary.  */
5158             TCGv cpu_addr = tcg_temp_new();
5159 
5160             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5161             if (xop == 0x3c || xop == 0x3e) {
5162                 /* V9 casa/casxa : no offset */
5163             } else if (IS_IMM) {     /* immediate */
5164                 simm = GET_FIELDs(insn, 19, 31);
5165                 if (simm != 0) {
5166                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5167                 }
5168             } else {            /* register */
5169                 rs2 = GET_FIELD(insn, 27, 31);
5170                 if (rs2 != 0) {
5171                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5172                 }
5173             }
5174             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5175                 (xop > 0x17 && xop <= 0x1d ) ||
5176                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5177                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5178 
5179                 switch (xop) {
5180                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5181                     gen_address_mask(dc, cpu_addr);
5182                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5183                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5184                     break;
5185                 case 0x1:       /* ldub, load unsigned byte */
5186                     gen_address_mask(dc, cpu_addr);
5187                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5188                                        dc->mem_idx, MO_UB);
5189                     break;
5190                 case 0x2:       /* lduh, load unsigned halfword */
5191                     gen_address_mask(dc, cpu_addr);
5192                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5193                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5194                     break;
5195                 case 0x3:       /* ldd, load double word */
5196                     if (rd & 1)
5197                         goto illegal_insn;
5198                     else {
5199                         TCGv_i64 t64;
5200 
5201                         gen_address_mask(dc, cpu_addr);
5202                         t64 = tcg_temp_new_i64();
5203                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5204                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5205                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5206                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5207                         gen_store_gpr(dc, rd + 1, cpu_val);
5208                         tcg_gen_shri_i64(t64, t64, 32);
5209                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5210                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5211                     }
5212                     break;
5213                 case 0x9:       /* ldsb, load signed byte */
5214                     gen_address_mask(dc, cpu_addr);
5215                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5216                     break;
5217                 case 0xa:       /* ldsh, load signed halfword */
5218                     gen_address_mask(dc, cpu_addr);
5219                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5220                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5221                     break;
5222                 case 0xd:       /* ldstub */
5223                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5224                     break;
5225                 case 0x0f:
5226                     /* swap, swap register with memory. Also atomically */
5227                     CHECK_IU_FEATURE(dc, SWAP);
5228                     cpu_src1 = gen_load_gpr(dc, rd);
5229                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5230                              dc->mem_idx, MO_TEUL);
5231                     break;
5232 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5233                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5234                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5235                     break;
5236                 case 0x11:      /* lduba, load unsigned byte alternate */
5237                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5238                     break;
5239                 case 0x12:      /* lduha, load unsigned halfword alternate */
5240                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5241                     break;
5242                 case 0x13:      /* ldda, load double word alternate */
5243                     if (rd & 1) {
5244                         goto illegal_insn;
5245                     }
5246                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5247                     goto skip_move;
5248                 case 0x19:      /* ldsba, load signed byte alternate */
5249                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5250                     break;
5251                 case 0x1a:      /* ldsha, load signed halfword alternate */
5252                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5253                     break;
5254                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5255                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5256                     break;
5257                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5258                                    atomically */
5259                     CHECK_IU_FEATURE(dc, SWAP);
5260                     cpu_src1 = gen_load_gpr(dc, rd);
5261                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5262                     break;
5263 
5264 #ifndef TARGET_SPARC64
5265                 case 0x30: /* ldc */
5266                 case 0x31: /* ldcsr */
5267                 case 0x33: /* lddc */
5268                     goto ncp_insn;
5269 #endif
5270 #endif
5271 #ifdef TARGET_SPARC64
5272                 case 0x08: /* V9 ldsw */
5273                     gen_address_mask(dc, cpu_addr);
5274                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5275                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5276                     break;
5277                 case 0x0b: /* V9 ldx */
5278                     gen_address_mask(dc, cpu_addr);
5279                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5280                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5281                     break;
5282                 case 0x18: /* V9 ldswa */
5283                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5284                     break;
5285                 case 0x1b: /* V9 ldxa */
5286                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5287                     break;
5288                 case 0x2d: /* V9 prefetch, no effect */
5289                     goto skip_move;
5290                 case 0x30: /* V9 ldfa */
5291                     if (gen_trap_ifnofpu(dc)) {
5292                         goto jmp_insn;
5293                     }
5294                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5295                     gen_update_fprs_dirty(dc, rd);
5296                     goto skip_move;
5297                 case 0x33: /* V9 lddfa */
5298                     if (gen_trap_ifnofpu(dc)) {
5299                         goto jmp_insn;
5300                     }
5301                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5302                     gen_update_fprs_dirty(dc, DFPREG(rd));
5303                     goto skip_move;
5304                 case 0x3d: /* V9 prefetcha, no effect */
5305                     goto skip_move;
5306                 case 0x32: /* V9 ldqfa */
5307                     CHECK_FPU_FEATURE(dc, FLOAT128);
5308                     if (gen_trap_ifnofpu(dc)) {
5309                         goto jmp_insn;
5310                     }
5311                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5312                     gen_update_fprs_dirty(dc, QFPREG(rd));
5313                     goto skip_move;
5314 #endif
5315                 default:
5316                     goto illegal_insn;
5317                 }
5318                 gen_store_gpr(dc, rd, cpu_val);
5319 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5320             skip_move: ;
5321 #endif
5322             } else if (xop >= 0x20 && xop < 0x24) {
5323                 if (gen_trap_ifnofpu(dc)) {
5324                     goto jmp_insn;
5325                 }
5326                 switch (xop) {
5327                 case 0x20:      /* ldf, load fpreg */
5328                     gen_address_mask(dc, cpu_addr);
5329                     cpu_dst_32 = gen_dest_fpr_F(dc);
5330                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5331                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5332                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5333                     break;
5334                 case 0x21:      /* ldfsr, V9 ldxfsr */
5335 #ifdef TARGET_SPARC64
5336                     gen_address_mask(dc, cpu_addr);
5337                     if (rd == 1) {
5338                         TCGv_i64 t64 = tcg_temp_new_i64();
5339                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5340                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5341                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5342                         break;
5343                     }
5344 #endif
5345                     cpu_dst_32 = tcg_temp_new_i32();
5346                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5347                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5348                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5349                     break;
5350                 case 0x22:      /* ldqf, load quad fpreg */
5351                     CHECK_FPU_FEATURE(dc, FLOAT128);
5352                     gen_address_mask(dc, cpu_addr);
5353                     cpu_src1_64 = tcg_temp_new_i64();
5354                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5355                                         MO_TEUQ | MO_ALIGN_4);
5356                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5357                     cpu_src2_64 = tcg_temp_new_i64();
5358                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5359                                         MO_TEUQ | MO_ALIGN_4);
5360                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5361                     break;
5362                 case 0x23:      /* lddf, load double fpreg */
5363                     gen_address_mask(dc, cpu_addr);
5364                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5365                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5366                                         MO_TEUQ | MO_ALIGN_4);
5367                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5368                     break;
5369                 default:
5370                     goto illegal_insn;
5371                 }
5372             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5373                        xop == 0xe || xop == 0x1e) {
5374                 TCGv cpu_val = gen_load_gpr(dc, rd);
5375 
5376                 switch (xop) {
5377                 case 0x4: /* st, store word */
5378                     gen_address_mask(dc, cpu_addr);
5379                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5380                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5381                     break;
5382                 case 0x5: /* stb, store byte */
5383                     gen_address_mask(dc, cpu_addr);
5384                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5385                     break;
5386                 case 0x6: /* sth, store halfword */
5387                     gen_address_mask(dc, cpu_addr);
5388                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5389                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5390                     break;
5391                 case 0x7: /* std, store double word */
5392                     if (rd & 1)
5393                         goto illegal_insn;
5394                     else {
5395                         TCGv_i64 t64;
5396                         TCGv lo;
5397 
5398                         gen_address_mask(dc, cpu_addr);
5399                         lo = gen_load_gpr(dc, rd + 1);
5400                         t64 = tcg_temp_new_i64();
5401                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5402                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5403                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5404                     }
5405                     break;
5406 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5407                 case 0x14: /* sta, V9 stwa, store word alternate */
5408                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5409                     break;
5410                 case 0x15: /* stba, store byte alternate */
5411                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5412                     break;
5413                 case 0x16: /* stha, store halfword alternate */
5414                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5415                     break;
5416                 case 0x17: /* stda, store double word alternate */
5417                     if (rd & 1) {
5418                         goto illegal_insn;
5419                     }
5420                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5421                     break;
5422 #endif
5423 #ifdef TARGET_SPARC64
5424                 case 0x0e: /* V9 stx */
5425                     gen_address_mask(dc, cpu_addr);
5426                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5427                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5428                     break;
5429                 case 0x1e: /* V9 stxa */
5430                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5431                     break;
5432 #endif
5433                 default:
5434                     goto illegal_insn;
5435                 }
5436             } else if (xop > 0x23 && xop < 0x28) {
5437                 if (gen_trap_ifnofpu(dc)) {
5438                     goto jmp_insn;
5439                 }
5440                 switch (xop) {
5441                 case 0x24: /* stf, store fpreg */
5442                     gen_address_mask(dc, cpu_addr);
5443                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5444                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5445                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5446                     break;
5447                 case 0x25: /* stfsr, V9 stxfsr */
5448                     {
5449 #ifdef TARGET_SPARC64
5450                         gen_address_mask(dc, cpu_addr);
5451                         if (rd == 1) {
5452                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5453                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5454                             break;
5455                         }
5456 #endif
5457                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5458                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5459                     }
5460                     break;
5461                 case 0x26:
5462 #ifdef TARGET_SPARC64
5463                     /* V9 stqf, store quad fpreg */
5464                     CHECK_FPU_FEATURE(dc, FLOAT128);
5465                     gen_address_mask(dc, cpu_addr);
5466                     /* ??? While stqf only requires 4-byte alignment, it is
5467                        legal for the cpu to signal the unaligned exception.
5468                        The OS trap handler is then required to fix it up.
5469                        For qemu, this avoids having to probe the second page
5470                        before performing the first write.  */
5471                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5472                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5473                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5474                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5475                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5476                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5477                                         dc->mem_idx, MO_TEUQ);
5478                     break;
5479 #else /* !TARGET_SPARC64 */
5480                     /* stdfq, store floating point queue */
5481 #if defined(CONFIG_USER_ONLY)
5482                     goto illegal_insn;
5483 #else
5484                     if (!supervisor(dc))
5485                         goto priv_insn;
5486                     if (gen_trap_ifnofpu(dc)) {
5487                         goto jmp_insn;
5488                     }
5489                     goto nfq_insn;
5490 #endif
5491 #endif
5492                 case 0x27: /* stdf, store double fpreg */
5493                     gen_address_mask(dc, cpu_addr);
5494                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5495                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5496                                         MO_TEUQ | MO_ALIGN_4);
5497                     break;
5498                 default:
5499                     goto illegal_insn;
5500                 }
5501             } else if (xop > 0x33 && xop < 0x3f) {
5502                 switch (xop) {
5503 #ifdef TARGET_SPARC64
5504                 case 0x34: /* V9 stfa */
5505                     if (gen_trap_ifnofpu(dc)) {
5506                         goto jmp_insn;
5507                     }
5508                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5509                     break;
5510                 case 0x36: /* V9 stqfa */
5511                     {
5512                         CHECK_FPU_FEATURE(dc, FLOAT128);
5513                         if (gen_trap_ifnofpu(dc)) {
5514                             goto jmp_insn;
5515                         }
5516                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5517                     }
5518                     break;
5519                 case 0x37: /* V9 stdfa */
5520                     if (gen_trap_ifnofpu(dc)) {
5521                         goto jmp_insn;
5522                     }
5523                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5524                     break;
5525                 case 0x3e: /* V9 casxa */
5526                     rs2 = GET_FIELD(insn, 27, 31);
5527                     cpu_src2 = gen_load_gpr(dc, rs2);
5528                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5529                     break;
5530 #else
5531                 case 0x34: /* stc */
5532                 case 0x35: /* stcsr */
5533                 case 0x36: /* stdcq */
5534                 case 0x37: /* stdc */
5535                     goto ncp_insn;
5536 #endif
5537 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5538                 case 0x3c: /* V9 or LEON3 casa */
5539 #ifndef TARGET_SPARC64
5540                     CHECK_IU_FEATURE(dc, CASA);
5541 #endif
5542                     rs2 = GET_FIELD(insn, 27, 31);
5543                     cpu_src2 = gen_load_gpr(dc, rs2);
5544                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5545                     break;
5546 #endif
5547                 default:
5548                     goto illegal_insn;
5549                 }
5550             } else {
5551                 goto illegal_insn;
5552             }
5553         }
5554         break;
5555     }
5556     /* default case for non jump instructions */
5557     if (dc->npc == DYNAMIC_PC) {
5558         dc->pc = DYNAMIC_PC;
5559         gen_op_next_insn();
5560     } else if (dc->npc == JUMP_PC) {
5561         /* we can do a static jump */
5562         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5563         dc->base.is_jmp = DISAS_NORETURN;
5564     } else {
5565         dc->pc = dc->npc;
5566         dc->npc = dc->npc + 4;
5567     }
5568  jmp_insn:
5569     return;
5570  illegal_insn:
5571     gen_exception(dc, TT_ILL_INSN);
5572     return;
5573  unimp_flush:
5574     gen_exception(dc, TT_UNIMP_FLUSH);
5575     return;
5576 #if !defined(CONFIG_USER_ONLY)
5577  priv_insn:
5578     gen_exception(dc, TT_PRIV_INSN);
5579     return;
5580 #endif
5581  nfpu_insn:
5582     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5583     return;
5584 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5585  nfq_insn:
5586     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5587     return;
5588 #endif
5589 #ifndef TARGET_SPARC64
5590  ncp_insn:
5591     gen_exception(dc, TT_NCP_INSN);
5592     return;
5593 #endif
5594 }
5595 
5596 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5597 {
5598     DisasContext *dc = container_of(dcbase, DisasContext, base);
5599     CPUSPARCState *env = cs->env_ptr;
5600     int bound;
5601 
5602     dc->pc = dc->base.pc_first;
5603     dc->npc = (target_ulong)dc->base.tb->cs_base;
5604     dc->cc_op = CC_OP_DYNAMIC;
5605     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5606     dc->def = &env->def;
5607     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5608     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5609 #ifndef CONFIG_USER_ONLY
5610     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5611 #endif
5612 #ifdef TARGET_SPARC64
5613     dc->fprs_dirty = 0;
5614     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5615 #ifndef CONFIG_USER_ONLY
5616     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5617 #endif
5618 #endif
5619     /*
5620      * if we reach a page boundary, we stop generation so that the
5621      * PC of a TT_TFAULT exception is always in the right page
5622      */
5623     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5624     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5625 }
5626 
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* No per-TB setup is required for SPARC.  */
}
5630 
5631 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5632 {
5633     DisasContext *dc = container_of(dcbase, DisasContext, base);
5634 
5635     if (dc->npc & JUMP_PC) {
5636         assert(dc->jump_pc[1] == dc->pc + 4);
5637         tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5638     } else {
5639         tcg_gen_insn_start(dc->pc, dc->npc);
5640     }
5641 }
5642 
5643 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5644 {
5645     DisasContext *dc = container_of(dcbase, DisasContext, base);
5646     CPUSPARCState *env = cs->env_ptr;
5647     unsigned int insn;
5648 
5649     insn = translator_ldl(env, &dc->base, dc->pc);
5650     dc->base.pc_next += 4;
5651     disas_sparc_insn(dc, insn);
5652 
5653     if (dc->base.is_jmp == DISAS_NORETURN) {
5654         return;
5655     }
5656     if (dc->pc != dc->base.pc_next) {
5657         dc->base.is_jmp = DISAS_TOO_MANY;
5658     }
5659 }
5660 
5661 static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
5662 {
5663     DisasContext *dc = container_of(dcbase, DisasContext, base);
5664 
5665     switch (dc->base.is_jmp) {
5666     case DISAS_NEXT:
5667     case DISAS_TOO_MANY:
5668         if (dc->pc != DYNAMIC_PC &&
5669             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5670             /* static PC and NPC: we can use direct chaining */
5671             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5672         } else {
5673             if (dc->pc != DYNAMIC_PC) {
5674                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5675             }
5676             save_npc(dc);
5677             tcg_gen_exit_tb(NULL, 0);
5678         }
5679         break;
5680 
5681     case DISAS_NORETURN:
5682        break;
5683 
5684     case DISAS_EXIT:
5685         /* Exit TB */
5686         save_state(dc);
5687         tcg_gen_exit_tb(NULL, 0);
5688         break;
5689 
5690     default:
5691         g_assert_not_reached();
5692     }
5693 }
5694 
5695 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5696                                CPUState *cpu, FILE *logfile)
5697 {
5698     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5699     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5700 }
5701 
/* Hooks driving the generic translator loop for the SPARC target.  */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5710 
5711 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5712                            target_ulong pc, void *host_pc)
5713 {
5714     DisasContext dc = {};
5715 
5716     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5717 }
5718 
5719 void sparc_tcg_init(void)
5720 {
5721     static const char gregnames[32][4] = {
5722         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5723         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5724         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5725         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5726     };
5727     static const char fregnames[32][4] = {
5728         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5729         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5730         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5731         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5732     };
5733 
5734     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5735 #ifdef TARGET_SPARC64
5736         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5737         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5738 #else
5739         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5740 #endif
5741         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5742         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5743     };
5744 
5745     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5746 #ifdef TARGET_SPARC64
5747         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5748         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5749         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5750         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5751           "hstick_cmpr" },
5752         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5753         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5754         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5755         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5756         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5757 #endif
5758         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5759         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5760         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5761         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5762         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5763         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5764         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5765         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5766 #ifndef CONFIG_USER_ONLY
5767         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5768 #endif
5769     };
5770 
5771     unsigned int i;
5772 
5773     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5774                                          offsetof(CPUSPARCState, regwptr),
5775                                          "regwptr");
5776 
5777     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5778         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5779     }
5780 
5781     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5782         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5783     }
5784 
5785     cpu_regs[0] = NULL;
5786     for (i = 1; i < 8; ++i) {
5787         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5788                                          offsetof(CPUSPARCState, gregs[i]),
5789                                          gregnames[i]);
5790     }
5791 
5792     for (i = 8; i < 32; ++i) {
5793         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5794                                          (i - 8) * sizeof(target_ulong),
5795                                          gregnames[i]);
5796     }
5797 
5798     for (i = 0; i < TARGET_DPREGS; i++) {
5799         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5800                                             offsetof(CPUSPARCState, fpr[i]),
5801                                             fregnames[i]);
5802     }
5803 }
5804 
5805 void sparc_restore_state_to_opc(CPUState *cs,
5806                                 const TranslationBlock *tb,
5807                                 const uint64_t *data)
5808 {
5809     SPARCCPU *cpu = SPARC_CPU(cs);
5810     CPUSPARCState *env = &cpu->env;
5811     target_ulong pc = data[0];
5812     target_ulong npc = data[1];
5813 
5814     env->pc = pc;
5815     if (npc == DYNAMIC_PC) {
5816         /* dynamic NPC: already stored */
5817     } else if (npc & JUMP_PC) {
5818         /* jump PC: use 'cond' and the jump targets of the translation */
5819         if (env->cond) {
5820             env->npc = npc & ~3;
5821         } else {
5822             env->npc = pc + 4;
5823         }
5824     } else {
5825         env->npc = npc;
5826     }
5827 }
5828