xref: /openbmc/qemu/target/sparc/translate.c (revision c85cad81)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "exec/translator.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 #define HELPER_H "helper.h"
37 #include "exec/helper-info.c.inc"
38 #undef  HELPER_H
39 
40 #define DYNAMIC_PC  1 /* dynamic pc value */
41 #define JUMP_PC     2 /* dynamic pc value which takes only two values
42                          according to jump_pc[T2] */
43 
44 #define DISAS_EXIT  DISAS_TARGET_0
45 
/* global register indexes */
static TCGv_ptr cpu_regwptr;
/* Lazy condition-code state: the last operands and result, interpreted
   according to cpu_cc_op (one of the CC_OP_* constants).  */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
/* Branch condition value, consumed by gen_generic_branch() et al.  */
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
68 
/* Per-translation-block decode state.  */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;        /* MMU index used for memory accesses */
    bool fpu_enabled;   /* FPU accesses allowed without trapping */
    bool address_mask_32bit; /* mask generated addresses to 32 bits */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS bits already known to be set in this TB */
    int asi;         /* ASI field of the current instruction */
#endif
} DisasContext;
91 
/*
 * A materialized comparison "cond(c1, c2)".  When is_bool is set,
 * c1 already holds a 0/1 value and c2 is the constant zero.
 */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
97 
98 // This function uses non-native bit order
99 #define GET_FIELD(X, FROM, TO)                                  \
100     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
101 
102 // This function uses the order in the manuals, i.e. bit 0 is 2^0
103 #define GET_FIELD_SP(X, FROM, TO)               \
104     GET_FIELD(X, 31 - (TO), 31 - (FROM))
105 
106 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
107 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
108 
109 #ifdef TARGET_SPARC64
110 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
111 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
112 #else
113 #define DFPREG(r) (r & 0x1e)
114 #define QFPREG(r) (r & 0x1c)
115 #endif
116 
117 #define UA2005_HTRAP_MASK 0xff
118 #define V8_TRAP_MASK 0x7f
119 
/*
 * Sign-extend the low LEN bits of X to a full int.
 * For LEN >= 32 (or LEN <= 0) X is returned unchanged.
 *
 * The previous implementation used (x << (32 - len)) >> (32 - len),
 * which left-shifts a (possibly negative) signed value -- undefined
 * behavior in C.  This version works entirely in unsigned arithmetic.
 */
static int sign_extend(int x, int len)
{
    if (len <= 0 || len >= 32) {
        return x;
    }
    uint32_t mask = (1u << len) - 1;
    uint32_t sign = 1u << (len - 1);
    /* Isolate the field, then flip and subtract the sign bit to extend.  */
    return (int)((((uint32_t)x & mask) ^ sign) - sign);
}
125 
126 #define IS_IMM (insn & (1<<13))
127 
/*
 * Record in FPRS that the half of the FP register file containing RD
 * has been written: bit 0 for rd < 32, bit 1 otherwise.  No-op for
 * non-sparc64 targets, which have no FPRS register.
 */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
140 
/* floating point registers moves */

/*
 * Return a fresh i32 temporary holding single-precision register %f<src>.
 * cpu_fpr[] packs two F registers per i64: the even-numbered register
 * lives in the high half, the odd-numbered one in the low half.
 */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}
152 
/*
 * Store V into single-precision register %f<dst>, depositing it into
 * the correct half of the backing i64 without disturbing the sibling
 * register, then mark FPRS dirty.
 */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
162 
/* Return a temporary usable as the destination of a single-precision op.  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
167 
/* Return the i64 global backing double-precision register %d<src>.  */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
173 
/* Store V into double-precision register %d<dst> and mark FPRS dirty.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
180 
/* Return the i64 global for %d<dst>; the caller writes it directly.
   Note: the caller is responsible for the FPRS dirty update.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
185 
/* Copy the quad register starting at SRC into the env->qt0 staging slot
   (upper/lower i64 halves), for helpers that operate on QT0.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* As above, but staging into env->qt1.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy env->qt0 back into the quad register starting at DST.
   Note: the caller is responsible for the FPRS dirty update.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
209 
/* Store the pair V1 (high half) and V2 (low half) into the quad register
   starting at DST, then mark FPRS dirty.  */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
219 
220 #ifdef TARGET_SPARC64
/* Return the first (upper) i64 half of quad register SRC.  */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the second (lower) i64 half of quad register SRC.  */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad register RS to quad register RD and mark FPRS dirty.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
242 #endif
243 
244 /* moves */
245 #ifdef CONFIG_USER_ONLY
246 #define supervisor(dc) 0
247 #ifdef TARGET_SPARC64
248 #define hypervisor(dc) 0
249 #endif
250 #else
251 #ifdef TARGET_SPARC64
252 #define hypervisor(dc) (dc->hypervisor)
253 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
254 #else
255 #define supervisor(dc) (dc->supervisor)
256 #endif
257 #endif
258 
259 #ifdef TARGET_SPARC64
260 #ifndef TARGET_ABI32
261 #define AM_CHECK(dc) ((dc)->address_mask_32bit)
262 #else
263 #define AM_CHECK(dc) (1)
264 #endif
265 #endif
266 
267 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
268 {
269 #ifdef TARGET_SPARC64
270     if (AM_CHECK(dc))
271         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
272 #endif
273 }
274 
275 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
276 {
277     if (reg > 0) {
278         assert(reg < 32);
279         return cpu_regs[reg];
280     } else {
281         TCGv t = tcg_temp_new();
282         tcg_gen_movi_tl(t, 0);
283         return t;
284     }
285 }
286 
287 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
288 {
289     if (reg > 0) {
290         assert(reg < 32);
291         tcg_gen_mov_tl(cpu_regs[reg], v);
292     }
293 }
294 
295 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
296 {
297     if (reg > 0) {
298         assert(reg < 32);
299         return cpu_regs[reg];
300     } else {
301         return tcg_temp_new();
302     }
303 }
304 
/* Direct TB chaining is allowed only if both the branch target and the
   following npc are safe to reach with goto_tb.  */
static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
{
    return translator_use_goto_tb(&s->base, pc) &&
           translator_use_goto_tb(&s->base, npc);
}
310 
/* End the TB jumping to (PC, NPC), chaining directly to the successor
   TB via slot TB_NUM when permitted.  */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(NULL, 0);
    }
}
327 
// XXX suboptimal
/* Extract PSR.N (negative) from SRC as a 0/1 value in REG.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

/* Extract PSR.Z (zero) from SRC as a 0/1 value in REG.  */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

/* Extract PSR.V (overflow) from SRC as a 0/1 value in REG.  */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

/* Extract PSR.C (carry) from SRC as a 0/1 value in REG.  */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
352 
/* DST = SRC1 + SRC2, recording operands and result in cpu_cc_* for
   lazy condition-code evaluation.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
360 
/*
 * Return a 0/1 i32 temporary holding the 32-bit carry produced by the
 * previous add, computed as (uint32_t)cc_dst < (uint32_t)cc_src.
 */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
381 
/*
 * Return a 0/1 i32 temporary holding the 32-bit borrow produced by the
 * previous subtract, computed as (uint32_t)cc_src < (uint32_t)cc_src2.
 */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
402 
/*
 * Generate DST = SRC1 + SRC2 + carry, where the carry is recovered from
 * the previously computed condition codes (dc->cc_op) without calling a
 * helper whenever possible.  If UPDATE_CC, also record the operands and
 * result for lazy CC_OP_ADDX flag evaluation.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
467 
/* DST = SRC1 - SRC2, recording operands and result in cpu_cc_* for
   lazy condition-code evaluation.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
475 
/*
 * Generate DST = SRC1 - SRC2 - borrow, where the borrow is recovered
 * from the previously computed condition codes (dc->cc_op) without
 * calling a helper whenever possible.  If UPDATE_CC, also record the
 * operands and result for lazy CC_OP_SUBX flag evaluation.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the subtract that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
540 
/*
 * One multiply step: shift Y right by one, inserting bit 0 of SRC1 at
 * the top; zero the addend when the old Y bit 0 was clear; then add
 * ((N ^ V) << 31 | SRC1 >> 1) to it.  Operands and result are left in
 * cpu_cc_src/cpu_cc_src2/cpu_cc_dst for the caller's flag update.
 */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
579 
/*
 * 32x32 -> 64 bit multiply of the low words of SRC1/SRC2, signed when
 * SIGN_EXT.  The high 32 bits of the product go to the Y register; DST
 * receives the low 32 bits on 32-bit targets and the full 64-bit
 * product on 64-bit targets.
 */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
604 
/* Unsigned 32x32 multiply (see gen_op_multiply for DST/Y semantics).  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* Signed 32x32 multiply (see gen_op_multiply for DST/Y semantics).  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
616 
617 // 1
618 static inline void gen_op_eval_ba(TCGv dst)
619 {
620     tcg_gen_movi_tl(dst, 1);
621 }
622 
623 // Z
624 static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
625 {
626     gen_mov_reg_Z(dst, src);
627 }
628 
629 // Z | (N ^ V)
630 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
631 {
632     TCGv t0 = tcg_temp_new();
633     gen_mov_reg_N(t0, src);
634     gen_mov_reg_V(dst, src);
635     tcg_gen_xor_tl(dst, dst, t0);
636     gen_mov_reg_Z(t0, src);
637     tcg_gen_or_tl(dst, dst, t0);
638 }
639 
640 // N ^ V
641 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
642 {
643     TCGv t0 = tcg_temp_new();
644     gen_mov_reg_V(t0, src);
645     gen_mov_reg_N(dst, src);
646     tcg_gen_xor_tl(dst, dst, t0);
647 }
648 
649 // C | Z
650 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
651 {
652     TCGv t0 = tcg_temp_new();
653     gen_mov_reg_Z(t0, src);
654     gen_mov_reg_C(dst, src);
655     tcg_gen_or_tl(dst, dst, t0);
656 }
657 
658 // C
659 static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
660 {
661     gen_mov_reg_C(dst, src);
662 }
663 
664 // V
665 static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
666 {
667     gen_mov_reg_V(dst, src);
668 }
669 
670 // 0
671 static inline void gen_op_eval_bn(TCGv dst)
672 {
673     tcg_gen_movi_tl(dst, 0);
674 }
675 
676 // N
677 static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
678 {
679     gen_mov_reg_N(dst, src);
680 }
681 
682 // !Z
683 static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
684 {
685     gen_mov_reg_Z(dst, src);
686     tcg_gen_xori_tl(dst, dst, 0x1);
687 }
688 
689 // !(Z | (N ^ V))
690 static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
691 {
692     gen_op_eval_ble(dst, src);
693     tcg_gen_xori_tl(dst, dst, 0x1);
694 }
695 
696 // !(N ^ V)
697 static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
698 {
699     gen_op_eval_bl(dst, src);
700     tcg_gen_xori_tl(dst, dst, 0x1);
701 }
702 
703 // !(C | Z)
704 static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
705 {
706     gen_op_eval_bleu(dst, src);
707     tcg_gen_xori_tl(dst, dst, 0x1);
708 }
709 
710 // !C
711 static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
712 {
713     gen_mov_reg_C(dst, src);
714     tcg_gen_xori_tl(dst, dst, 0x1);
715 }
716 
717 // !N
718 static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
719 {
720     gen_mov_reg_N(dst, src);
721     tcg_gen_xori_tl(dst, dst, 0x1);
722 }
723 
724 // !V
725 static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
726 {
727     gen_mov_reg_V(dst, src);
728     tcg_gen_xori_tl(dst, dst, 0x1);
729 }
730 
731 /*
732   FPSR bit field FCC1 | FCC0:
733    0 =
734    1 <
735    2 >
736    3 unordered
737 */
738 static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
739                                     unsigned int fcc_offset)
740 {
741     tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
742     tcg_gen_andi_tl(reg, reg, 0x1);
743 }
744 
745 static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
746                                     unsigned int fcc_offset)
747 {
748     tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
749     tcg_gen_andi_tl(reg, reg, 0x1);
750 }
751 
752 // !0: FCC0 | FCC1
753 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
754                                     unsigned int fcc_offset)
755 {
756     TCGv t0 = tcg_temp_new();
757     gen_mov_reg_FCC0(dst, src, fcc_offset);
758     gen_mov_reg_FCC1(t0, src, fcc_offset);
759     tcg_gen_or_tl(dst, dst, t0);
760 }
761 
762 // 1 or 2: FCC0 ^ FCC1
763 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
764                                     unsigned int fcc_offset)
765 {
766     TCGv t0 = tcg_temp_new();
767     gen_mov_reg_FCC0(dst, src, fcc_offset);
768     gen_mov_reg_FCC1(t0, src, fcc_offset);
769     tcg_gen_xor_tl(dst, dst, t0);
770 }
771 
772 // 1 or 3: FCC0
773 static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
774                                     unsigned int fcc_offset)
775 {
776     gen_mov_reg_FCC0(dst, src, fcc_offset);
777 }
778 
779 // 1: FCC0 & !FCC1
780 static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
781                                     unsigned int fcc_offset)
782 {
783     TCGv t0 = tcg_temp_new();
784     gen_mov_reg_FCC0(dst, src, fcc_offset);
785     gen_mov_reg_FCC1(t0, src, fcc_offset);
786     tcg_gen_andc_tl(dst, dst, t0);
787 }
788 
789 // 2 or 3: FCC1
790 static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
791                                     unsigned int fcc_offset)
792 {
793     gen_mov_reg_FCC1(dst, src, fcc_offset);
794 }
795 
796 // 2: !FCC0 & FCC1
797 static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
798                                     unsigned int fcc_offset)
799 {
800     TCGv t0 = tcg_temp_new();
801     gen_mov_reg_FCC0(dst, src, fcc_offset);
802     gen_mov_reg_FCC1(t0, src, fcc_offset);
803     tcg_gen_andc_tl(dst, t0, dst);
804 }
805 
806 // 3: FCC0 & FCC1
807 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
808                                     unsigned int fcc_offset)
809 {
810     TCGv t0 = tcg_temp_new();
811     gen_mov_reg_FCC0(dst, src, fcc_offset);
812     gen_mov_reg_FCC1(t0, src, fcc_offset);
813     tcg_gen_and_tl(dst, dst, t0);
814 }
815 
816 // 0: !(FCC0 | FCC1)
817 static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
818                                     unsigned int fcc_offset)
819 {
820     TCGv t0 = tcg_temp_new();
821     gen_mov_reg_FCC0(dst, src, fcc_offset);
822     gen_mov_reg_FCC1(t0, src, fcc_offset);
823     tcg_gen_or_tl(dst, dst, t0);
824     tcg_gen_xori_tl(dst, dst, 0x1);
825 }
826 
827 // 0 or 3: !(FCC0 ^ FCC1)
828 static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
829                                     unsigned int fcc_offset)
830 {
831     TCGv t0 = tcg_temp_new();
832     gen_mov_reg_FCC0(dst, src, fcc_offset);
833     gen_mov_reg_FCC1(t0, src, fcc_offset);
834     tcg_gen_xor_tl(dst, dst, t0);
835     tcg_gen_xori_tl(dst, dst, 0x1);
836 }
837 
838 // 0 or 2: !FCC0
839 static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
840                                     unsigned int fcc_offset)
841 {
842     gen_mov_reg_FCC0(dst, src, fcc_offset);
843     tcg_gen_xori_tl(dst, dst, 0x1);
844 }
845 
846 // !1: !(FCC0 & !FCC1)
847 static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
848                                     unsigned int fcc_offset)
849 {
850     TCGv t0 = tcg_temp_new();
851     gen_mov_reg_FCC0(dst, src, fcc_offset);
852     gen_mov_reg_FCC1(t0, src, fcc_offset);
853     tcg_gen_andc_tl(dst, dst, t0);
854     tcg_gen_xori_tl(dst, dst, 0x1);
855 }
856 
857 // 0 or 1: !FCC1
858 static inline void gen_op_eval_fble(TCGv dst, TCGv src,
859                                     unsigned int fcc_offset)
860 {
861     gen_mov_reg_FCC1(dst, src, fcc_offset);
862     tcg_gen_xori_tl(dst, dst, 0x1);
863 }
864 
865 // !2: !(!FCC0 & FCC1)
866 static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
867                                     unsigned int fcc_offset)
868 {
869     TCGv t0 = tcg_temp_new();
870     gen_mov_reg_FCC0(dst, src, fcc_offset);
871     gen_mov_reg_FCC1(t0, src, fcc_offset);
872     tcg_gen_andc_tl(dst, t0, dst);
873     tcg_gen_xori_tl(dst, dst, 0x1);
874 }
875 
876 // !3: !(FCC0 & FCC1)
877 static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
878                                     unsigned int fcc_offset)
879 {
880     TCGv t0 = tcg_temp_new();
881     gen_mov_reg_FCC0(dst, src, fcc_offset);
882     gen_mov_reg_FCC1(t0, src, fcc_offset);
883     tcg_gen_and_tl(dst, dst, t0);
884     tcg_gen_xori_tl(dst, dst, 0x1);
885 }
886 
/* Two-way TB exit: continue at PC1 when R_COND is non-zero, else at PC2.
   Each successor gets its own goto_tb chaining slot.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
899 
/*
 * Conditional branch with annul: when cpu_cond is non-zero, execute the
 * delay slot (npc) and continue at PC1; otherwise skip the delay slot
 * and continue at npc + 4.  Ends the TB.
 */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
914 
/*
 * Conditional branch without annul: the delay slot always executes, and
 * the following npc is PC1 when cpu_cond is non-zero, npc + 4 otherwise.
 * If npc is known statically the decision is deferred via JUMP_PC and
 * jump_pc[]; otherwise it is resolved immediately with a movcond.
 */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_constant_tl(pc1);
        z = tcg_constant_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);

        dc->pc = DYNAMIC_PC;
    }
}
937 
/* Resolve a pending JUMP_PC: select cpu_npc between jump_pc[0] (taken)
   and jump_pc[1] (not taken) based on cpu_cond.  */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
946 
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Materialize the pending two-way npc so cpu_cond can be reused.  */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
956 
/* Write the current (possibly conditional) npc back to cpu_npc.  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
966 
/* Force the lazily evaluated condition codes into the PSR via helper,
   unless they are already up to date (CC_OP_FLAGS).  */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
974 
/* Flush pc and npc to the CPU state, e.g. before an operation that may
   raise an exception.  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
980 
/* Raise exception WHICH at the current pc/npc and end the TB.  */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
987 
/* Emit a runtime alignment check of ADDR against MASK via helper.  */
static void gen_check_align(TCGv addr, int mask)
{
    gen_helper_check_align(cpu_env, addr, tcg_constant_i32(mask));
}
992 
/* Advance pc to the current npc, materializing the npc first when it is
   conditional (JUMP_PC) or already dynamic.  */
static inline void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else if (dc->npc == DYNAMIC_PC) {
        tcg_gen_mov_tl(cpu_pc, cpu_npc);
        dc->pc = DYNAMIC_PC;
    } else {
        dc->pc = dc->npc;
    }
}
1006 
/* Fall through to the next instruction: pc = npc, npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1012 
1013 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1014                         DisasContext *dc)
1015 {
1016     static int subcc_cond[16] = {
1017         TCG_COND_NEVER,
1018         TCG_COND_EQ,
1019         TCG_COND_LE,
1020         TCG_COND_LT,
1021         TCG_COND_LEU,
1022         TCG_COND_LTU,
1023         -1, /* neg */
1024         -1, /* overflow */
1025         TCG_COND_ALWAYS,
1026         TCG_COND_NE,
1027         TCG_COND_GT,
1028         TCG_COND_GE,
1029         TCG_COND_GTU,
1030         TCG_COND_GEU,
1031         -1, /* pos */
1032         -1, /* no overflow */
1033     };
1034 
1035     static int logic_cond[16] = {
1036         TCG_COND_NEVER,
1037         TCG_COND_EQ,     /* eq:  Z */
1038         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1039         TCG_COND_LT,     /* lt:  N ^ V -> N */
1040         TCG_COND_EQ,     /* leu: C | Z -> Z */
1041         TCG_COND_NEVER,  /* ltu: C -> 0 */
1042         TCG_COND_LT,     /* neg: N */
1043         TCG_COND_NEVER,  /* vs:  V -> 0 */
1044         TCG_COND_ALWAYS,
1045         TCG_COND_NE,     /* ne:  !Z */
1046         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1047         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1048         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1049         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1050         TCG_COND_GE,     /* pos: !N */
1051         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1052     };
1053 
1054     TCGv_i32 r_src;
1055     TCGv r_dst;
1056 
1057 #ifdef TARGET_SPARC64
1058     if (xcc) {
1059         r_src = cpu_xcc;
1060     } else {
1061         r_src = cpu_psr;
1062     }
1063 #else
1064     r_src = cpu_psr;
1065 #endif
1066 
1067     switch (dc->cc_op) {
1068     case CC_OP_LOGIC:
1069         cmp->cond = logic_cond[cond];
1070     do_compare_dst_0:
1071         cmp->is_bool = false;
1072         cmp->c2 = tcg_constant_tl(0);
1073 #ifdef TARGET_SPARC64
1074         if (!xcc) {
1075             cmp->c1 = tcg_temp_new();
1076             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1077             break;
1078         }
1079 #endif
1080         cmp->c1 = cpu_cc_dst;
1081         break;
1082 
1083     case CC_OP_SUB:
1084         switch (cond) {
1085         case 6:  /* neg */
1086         case 14: /* pos */
1087             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1088             goto do_compare_dst_0;
1089 
1090         case 7: /* overflow */
1091         case 15: /* !overflow */
1092             goto do_dynamic;
1093 
1094         default:
1095             cmp->cond = subcc_cond[cond];
1096             cmp->is_bool = false;
1097 #ifdef TARGET_SPARC64
1098             if (!xcc) {
1099                 /* Note that sign-extension works for unsigned compares as
1100                    long as both operands are sign-extended.  */
1101                 cmp->c1 = tcg_temp_new();
1102                 cmp->c2 = tcg_temp_new();
1103                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1104                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1105                 break;
1106             }
1107 #endif
1108             cmp->c1 = cpu_cc_src;
1109             cmp->c2 = cpu_cc_src2;
1110             break;
1111         }
1112         break;
1113 
1114     default:
1115     do_dynamic:
1116         gen_helper_compute_psr(cpu_env);
1117         dc->cc_op = CC_OP_FLAGS;
1118         /* FALLTHRU */
1119 
1120     case CC_OP_FLAGS:
1121         /* We're going to generate a boolean result.  */
1122         cmp->cond = TCG_COND_NE;
1123         cmp->is_bool = true;
1124         cmp->c1 = r_dst = tcg_temp_new();
1125         cmp->c2 = tcg_constant_tl(0);
1126 
1127         switch (cond) {
1128         case 0x0:
1129             gen_op_eval_bn(r_dst);
1130             break;
1131         case 0x1:
1132             gen_op_eval_be(r_dst, r_src);
1133             break;
1134         case 0x2:
1135             gen_op_eval_ble(r_dst, r_src);
1136             break;
1137         case 0x3:
1138             gen_op_eval_bl(r_dst, r_src);
1139             break;
1140         case 0x4:
1141             gen_op_eval_bleu(r_dst, r_src);
1142             break;
1143         case 0x5:
1144             gen_op_eval_bcs(r_dst, r_src);
1145             break;
1146         case 0x6:
1147             gen_op_eval_bneg(r_dst, r_src);
1148             break;
1149         case 0x7:
1150             gen_op_eval_bvs(r_dst, r_src);
1151             break;
1152         case 0x8:
1153             gen_op_eval_ba(r_dst);
1154             break;
1155         case 0x9:
1156             gen_op_eval_bne(r_dst, r_src);
1157             break;
1158         case 0xa:
1159             gen_op_eval_bg(r_dst, r_src);
1160             break;
1161         case 0xb:
1162             gen_op_eval_bge(r_dst, r_src);
1163             break;
1164         case 0xc:
1165             gen_op_eval_bgu(r_dst, r_src);
1166             break;
1167         case 0xd:
1168             gen_op_eval_bcc(r_dst, r_src);
1169             break;
1170         case 0xe:
1171             gen_op_eval_bpos(r_dst, r_src);
1172             break;
1173         case 0xf:
1174             gen_op_eval_bvc(r_dst, r_src);
1175             break;
1176         }
1177         break;
1178     }
1179 }
1180 
1181 static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
1182 {
1183     unsigned int offset;
1184     TCGv r_dst;
1185 
1186     /* For now we still generate a straight boolean result.  */
1187     cmp->cond = TCG_COND_NE;
1188     cmp->is_bool = true;
1189     cmp->c1 = r_dst = tcg_temp_new();
1190     cmp->c2 = tcg_constant_tl(0);
1191 
1192     switch (cc) {
1193     default:
1194     case 0x0:
1195         offset = 0;
1196         break;
1197     case 0x1:
1198         offset = 32 - 10;
1199         break;
1200     case 0x2:
1201         offset = 34 - 10;
1202         break;
1203     case 0x3:
1204         offset = 36 - 10;
1205         break;
1206     }
1207 
1208     switch (cond) {
1209     case 0x0:
1210         gen_op_eval_bn(r_dst);
1211         break;
1212     case 0x1:
1213         gen_op_eval_fbne(r_dst, cpu_fsr, offset);
1214         break;
1215     case 0x2:
1216         gen_op_eval_fblg(r_dst, cpu_fsr, offset);
1217         break;
1218     case 0x3:
1219         gen_op_eval_fbul(r_dst, cpu_fsr, offset);
1220         break;
1221     case 0x4:
1222         gen_op_eval_fbl(r_dst, cpu_fsr, offset);
1223         break;
1224     case 0x5:
1225         gen_op_eval_fbug(r_dst, cpu_fsr, offset);
1226         break;
1227     case 0x6:
1228         gen_op_eval_fbg(r_dst, cpu_fsr, offset);
1229         break;
1230     case 0x7:
1231         gen_op_eval_fbu(r_dst, cpu_fsr, offset);
1232         break;
1233     case 0x8:
1234         gen_op_eval_ba(r_dst);
1235         break;
1236     case 0x9:
1237         gen_op_eval_fbe(r_dst, cpu_fsr, offset);
1238         break;
1239     case 0xa:
1240         gen_op_eval_fbue(r_dst, cpu_fsr, offset);
1241         break;
1242     case 0xb:
1243         gen_op_eval_fbge(r_dst, cpu_fsr, offset);
1244         break;
1245     case 0xc:
1246         gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
1247         break;
1248     case 0xd:
1249         gen_op_eval_fble(r_dst, cpu_fsr, offset);
1250         break;
1251     case 0xe:
1252         gen_op_eval_fbule(r_dst, cpu_fsr, offset);
1253         break;
1254     case 0xf:
1255         gen_op_eval_fbo(r_dst, cpu_fsr, offset);
1256         break;
1257     }
1258 }
1259 
1260 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1261                      DisasContext *dc)
1262 {
1263     DisasCompare cmp;
1264     gen_compare(&cmp, cc, cond, dc);
1265 
1266     /* The interface is to return a boolean in r_dst.  */
1267     if (cmp.is_bool) {
1268         tcg_gen_mov_tl(r_dst, cmp.c1);
1269     } else {
1270         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1271     }
1272 }
1273 
1274 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1275 {
1276     DisasCompare cmp;
1277     gen_fcompare(&cmp, cc, cond);
1278 
1279     /* The interface is to return a boolean in r_dst.  */
1280     if (cmp.is_bool) {
1281         tcg_gen_mov_tl(r_dst, cmp.c1);
1282     } else {
1283         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1284     }
1285 }
1286 
1287 #ifdef TARGET_SPARC64
/* TCG conditions for the branch/move-on-register-contents encodings.
   The table stores the INVERSE of each condition; gen_compare_reg()
   applies tcg_invert_cond() to recover the real one.  The -1 entries
   (0 and 4) are reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1299 
1300 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1301 {
1302     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1303     cmp->is_bool = false;
1304     cmp->c1 = r_src;
1305     cmp->c2 = tcg_constant_tl(0);
1306 }
1307 
1308 static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1309 {
1310     DisasCompare cmp;
1311     gen_compare_reg(&cmp, cond, r_src);
1312 
1313     /* The interface is to return a boolean in r_dst.  */
1314     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1315 }
1316 #endif
1317 
1318 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1319 {
1320     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1321     target_ulong target = dc->pc + offset;
1322 
1323 #ifdef TARGET_SPARC64
1324     if (unlikely(AM_CHECK(dc))) {
1325         target &= 0xffffffffULL;
1326     }
1327 #endif
1328     if (cond == 0x0) {
1329         /* unconditional not taken */
1330         if (a) {
1331             dc->pc = dc->npc + 4;
1332             dc->npc = dc->pc + 4;
1333         } else {
1334             dc->pc = dc->npc;
1335             dc->npc = dc->pc + 4;
1336         }
1337     } else if (cond == 0x8) {
1338         /* unconditional taken */
1339         if (a) {
1340             dc->pc = target;
1341             dc->npc = dc->pc + 4;
1342         } else {
1343             dc->pc = dc->npc;
1344             dc->npc = target;
1345             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1346         }
1347     } else {
1348         flush_cond(dc);
1349         gen_cond(cpu_cond, cc, cond, dc);
1350         if (a) {
1351             gen_branch_a(dc, target);
1352         } else {
1353             gen_branch_n(dc, target);
1354         }
1355     }
1356 }
1357 
1358 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1359 {
1360     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1361     target_ulong target = dc->pc + offset;
1362 
1363 #ifdef TARGET_SPARC64
1364     if (unlikely(AM_CHECK(dc))) {
1365         target &= 0xffffffffULL;
1366     }
1367 #endif
1368     if (cond == 0x0) {
1369         /* unconditional not taken */
1370         if (a) {
1371             dc->pc = dc->npc + 4;
1372             dc->npc = dc->pc + 4;
1373         } else {
1374             dc->pc = dc->npc;
1375             dc->npc = dc->pc + 4;
1376         }
1377     } else if (cond == 0x8) {
1378         /* unconditional taken */
1379         if (a) {
1380             dc->pc = target;
1381             dc->npc = dc->pc + 4;
1382         } else {
1383             dc->pc = dc->npc;
1384             dc->npc = target;
1385             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1386         }
1387     } else {
1388         flush_cond(dc);
1389         gen_fcond(cpu_cond, cc, cond);
1390         if (a) {
1391             gen_branch_a(dc, target);
1392         } else {
1393             gen_branch_n(dc, target);
1394         }
1395     }
1396 }
1397 
1398 #ifdef TARGET_SPARC64
1399 static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
1400                           TCGv r_reg)
1401 {
1402     unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
1403     target_ulong target = dc->pc + offset;
1404 
1405     if (unlikely(AM_CHECK(dc))) {
1406         target &= 0xffffffffULL;
1407     }
1408     flush_cond(dc);
1409     gen_cond_reg(cpu_cond, cond, r_reg);
1410     if (a) {
1411         gen_branch_a(dc, target);
1412     } else {
1413         gen_branch_n(dc, target);
1414     }
1415 }
1416 
1417 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1418 {
1419     switch (fccno) {
1420     case 0:
1421         gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
1422         break;
1423     case 1:
1424         gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1425         break;
1426     case 2:
1427         gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1428         break;
1429     case 3:
1430         gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1431         break;
1432     }
1433 }
1434 
1435 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1436 {
1437     switch (fccno) {
1438     case 0:
1439         gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
1440         break;
1441     case 1:
1442         gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1443         break;
1444     case 2:
1445         gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1446         break;
1447     case 3:
1448         gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1449         break;
1450     }
1451 }
1452 
1453 static inline void gen_op_fcmpq(int fccno)
1454 {
1455     switch (fccno) {
1456     case 0:
1457         gen_helper_fcmpq(cpu_fsr, cpu_env);
1458         break;
1459     case 1:
1460         gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
1461         break;
1462     case 2:
1463         gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
1464         break;
1465     case 3:
1466         gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
1467         break;
1468     }
1469 }
1470 
1471 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1472 {
1473     switch (fccno) {
1474     case 0:
1475         gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
1476         break;
1477     case 1:
1478         gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1479         break;
1480     case 2:
1481         gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1482         break;
1483     case 3:
1484         gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1485         break;
1486     }
1487 }
1488 
1489 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1490 {
1491     switch (fccno) {
1492     case 0:
1493         gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
1494         break;
1495     case 1:
1496         gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1497         break;
1498     case 2:
1499         gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1500         break;
1501     case 3:
1502         gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1503         break;
1504     }
1505 }
1506 
1507 static inline void gen_op_fcmpeq(int fccno)
1508 {
1509     switch (fccno) {
1510     case 0:
1511         gen_helper_fcmpeq(cpu_fsr, cpu_env);
1512         break;
1513     case 1:
1514         gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
1515         break;
1516     case 2:
1517         gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
1518         break;
1519     case 3:
1520         gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
1521         break;
1522     }
1523 }
1524 
1525 #else
1526 
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    /* Pre-v9 has a single fcc field, so fccno is ignored.  */
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1531 
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    /* Pre-v9 has a single fcc field, so fccno is ignored.  */
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1536 
static inline void gen_op_fcmpq(int fccno)
{
    /* Pre-v9 has a single fcc field, so fccno is ignored.  */
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1541 
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    /* Pre-v9 has a single fcc field, so fccno is ignored.  */
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1546 
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    /* Pre-v9 has a single fcc field, so fccno is ignored.  */
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1551 
static inline void gen_op_fcmpeq(int fccno)
{
    /* Pre-v9 has a single fcc field, so fccno is ignored.  */
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1556 #endif
1557 
/* Replace the FSR ftt field with fsr_flags and raise an FP exception.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Clear the old ftt bits before OR-ing in the new trap type.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1564 
/* If the FPU is disabled, emit an fp-disabled trap and return 1 so the
   caller can abandon the current insn; return 0 when the FPU is usable.
   In user-only builds the check is compiled out.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1575 
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    /* Clear the FSR ftt and current-exception (cexc) fields.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1580 
1581 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1582                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1583 {
1584     TCGv_i32 dst, src;
1585 
1586     src = gen_load_fpr_F(dc, rs);
1587     dst = gen_dest_fpr_F(dc);
1588 
1589     gen(dst, cpu_env, src);
1590     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1591 
1592     gen_store_fpr_F(dc, rd, dst);
1593 }
1594 
1595 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1596                                  void (*gen)(TCGv_i32, TCGv_i32))
1597 {
1598     TCGv_i32 dst, src;
1599 
1600     src = gen_load_fpr_F(dc, rs);
1601     dst = gen_dest_fpr_F(dc);
1602 
1603     gen(dst, src);
1604 
1605     gen_store_fpr_F(dc, rd, dst);
1606 }
1607 
1608 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1609                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1610 {
1611     TCGv_i32 dst, src1, src2;
1612 
1613     src1 = gen_load_fpr_F(dc, rs1);
1614     src2 = gen_load_fpr_F(dc, rs2);
1615     dst = gen_dest_fpr_F(dc);
1616 
1617     gen(dst, cpu_env, src1, src2);
1618     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1619 
1620     gen_store_fpr_F(dc, rd, dst);
1621 }
1622 
1623 #ifdef TARGET_SPARC64
1624 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1625                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1626 {
1627     TCGv_i32 dst, src1, src2;
1628 
1629     src1 = gen_load_fpr_F(dc, rs1);
1630     src2 = gen_load_fpr_F(dc, rs2);
1631     dst = gen_dest_fpr_F(dc);
1632 
1633     gen(dst, src1, src2);
1634 
1635     gen_store_fpr_F(dc, rd, dst);
1636 }
1637 #endif
1638 
1639 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1640                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1641 {
1642     TCGv_i64 dst, src;
1643 
1644     src = gen_load_fpr_D(dc, rs);
1645     dst = gen_dest_fpr_D(dc, rd);
1646 
1647     gen(dst, cpu_env, src);
1648     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1649 
1650     gen_store_fpr_D(dc, rd, dst);
1651 }
1652 
1653 #ifdef TARGET_SPARC64
1654 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1655                                  void (*gen)(TCGv_i64, TCGv_i64))
1656 {
1657     TCGv_i64 dst, src;
1658 
1659     src = gen_load_fpr_D(dc, rs);
1660     dst = gen_dest_fpr_D(dc, rd);
1661 
1662     gen(dst, src);
1663 
1664     gen_store_fpr_D(dc, rd, dst);
1665 }
1666 #endif
1667 
1668 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1669                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1670 {
1671     TCGv_i64 dst, src1, src2;
1672 
1673     src1 = gen_load_fpr_D(dc, rs1);
1674     src2 = gen_load_fpr_D(dc, rs2);
1675     dst = gen_dest_fpr_D(dc, rd);
1676 
1677     gen(dst, cpu_env, src1, src2);
1678     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1679 
1680     gen_store_fpr_D(dc, rd, dst);
1681 }
1682 
1683 #ifdef TARGET_SPARC64
1684 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1685                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1686 {
1687     TCGv_i64 dst, src1, src2;
1688 
1689     src1 = gen_load_fpr_D(dc, rs1);
1690     src2 = gen_load_fpr_D(dc, rs2);
1691     dst = gen_dest_fpr_D(dc, rd);
1692 
1693     gen(dst, src1, src2);
1694 
1695     gen_store_fpr_D(dc, rd, dst);
1696 }
1697 
1698 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1699                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1700 {
1701     TCGv_i64 dst, src1, src2;
1702 
1703     src1 = gen_load_fpr_D(dc, rs1);
1704     src2 = gen_load_fpr_D(dc, rs2);
1705     dst = gen_dest_fpr_D(dc, rd);
1706 
1707     gen(dst, cpu_gsr, src1, src2);
1708 
1709     gen_store_fpr_D(dc, rd, dst);
1710 }
1711 
1712 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1713                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1714 {
1715     TCGv_i64 dst, src0, src1, src2;
1716 
1717     src1 = gen_load_fpr_D(dc, rs1);
1718     src2 = gen_load_fpr_D(dc, rs2);
1719     src0 = gen_load_fpr_D(dc, rd);
1720     dst = gen_dest_fpr_D(dc, rd);
1721 
1722     gen(dst, src0, src1, src2);
1723 
1724     gen_store_fpr_D(dc, rd, dst);
1725 }
1726 #endif
1727 
/* Quad unary op: operands pass through the env QT staging registers.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1739 
1740 #ifdef TARGET_SPARC64
/* Quad unary op via the env QT registers; no IEEE exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1751 #endif
1752 
/* Quad binary op: both operands staged through env QT0/QT1.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1765 
1766 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1767                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1768 {
1769     TCGv_i64 dst;
1770     TCGv_i32 src1, src2;
1771 
1772     src1 = gen_load_fpr_F(dc, rs1);
1773     src2 = gen_load_fpr_F(dc, rs2);
1774     dst = gen_dest_fpr_D(dc, rd);
1775 
1776     gen(dst, cpu_env, src1, src2);
1777     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1778 
1779     gen_store_fpr_D(dc, rd, dst);
1780 }
1781 
1782 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1783                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1784 {
1785     TCGv_i64 src1, src2;
1786 
1787     src1 = gen_load_fpr_D(dc, rs1);
1788     src2 = gen_load_fpr_D(dc, rs2);
1789 
1790     gen(cpu_env, src1, src2);
1791     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1792 
1793     gen_op_store_QT0_fpr(QFPREG(rd));
1794     gen_update_fprs_dirty(dc, QFPREG(rd));
1795 }
1796 
1797 #ifdef TARGET_SPARC64
1798 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1799                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1800 {
1801     TCGv_i64 dst;
1802     TCGv_i32 src;
1803 
1804     src = gen_load_fpr_F(dc, rs);
1805     dst = gen_dest_fpr_D(dc, rd);
1806 
1807     gen(dst, cpu_env, src);
1808     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1809 
1810     gen_store_fpr_D(dc, rd, dst);
1811 }
1812 #endif
1813 
1814 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1815                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1816 {
1817     TCGv_i64 dst;
1818     TCGv_i32 src;
1819 
1820     src = gen_load_fpr_F(dc, rs);
1821     dst = gen_dest_fpr_D(dc, rd);
1822 
1823     gen(dst, cpu_env, src);
1824 
1825     gen_store_fpr_D(dc, rd, dst);
1826 }
1827 
1828 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1829                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1830 {
1831     TCGv_i32 dst;
1832     TCGv_i64 src;
1833 
1834     src = gen_load_fpr_D(dc, rs);
1835     dst = gen_dest_fpr_F(dc);
1836 
1837     gen(dst, cpu_env, src);
1838     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1839 
1840     gen_store_fpr_F(dc, rd, dst);
1841 }
1842 
1843 static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
1844                               void (*gen)(TCGv_i32, TCGv_ptr))
1845 {
1846     TCGv_i32 dst;
1847 
1848     gen_op_load_fpr_QT1(QFPREG(rs));
1849     dst = gen_dest_fpr_F(dc);
1850 
1851     gen(dst, cpu_env);
1852     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1853 
1854     gen_store_fpr_F(dc, rd, dst);
1855 }
1856 
1857 static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
1858                               void (*gen)(TCGv_i64, TCGv_ptr))
1859 {
1860     TCGv_i64 dst;
1861 
1862     gen_op_load_fpr_QT1(QFPREG(rs));
1863     dst = gen_dest_fpr_D(dc, rd);
1864 
1865     gen(dst, cpu_env);
1866     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1867 
1868     gen_store_fpr_D(dc, rd, dst);
1869 }
1870 
1871 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
1872                                  void (*gen)(TCGv_ptr, TCGv_i32))
1873 {
1874     TCGv_i32 src;
1875 
1876     src = gen_load_fpr_F(dc, rs);
1877 
1878     gen(cpu_env, src);
1879 
1880     gen_op_store_QT0_fpr(QFPREG(rd));
1881     gen_update_fprs_dirty(dc, QFPREG(rd));
1882 }
1883 
1884 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
1885                                  void (*gen)(TCGv_ptr, TCGv_i64))
1886 {
1887     TCGv_i64 src;
1888 
1889     src = gen_load_fpr_D(dc, rs);
1890 
1891     gen(cpu_env, src);
1892 
1893     gen_op_store_QT0_fpr(QFPREG(rd));
1894     gen_update_fprs_dirty(dc, QFPREG(rd));
1895 }
1896 
/* SWAP: atomically exchange src with the aligned memory value at addr,
   leaving the old memory value in dst.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1903 
1904 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1905 {
1906     TCGv m1 = tcg_constant_tl(0xff);
1907     gen_address_mask(dc, addr);
1908     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1909 }
1910 
1911 /* asi moves */
1912 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How the translator will implement an ASI access (see get_asi).  */
typedef enum {
    GET_ASI_HELPER,   /* no direct implementation: call the slow-path helper */
    GET_ASI_EXCP,     /* an exception was raised; emit no memory access */
    GET_ASI_DIRECT,   /* plain qemu_ld/st with the chosen mem_idx */
    GET_ASI_DTWINX,   /* 128-bit twin/quad load-store */
    GET_ASI_BLOCK,    /* block transfer */
    GET_ASI_SHORT,    /* 8/16-bit "short" floating-point load/store */
    GET_ASI_BCOPY,    /* sparc32 block copy (sta) */
    GET_ASI_BFILL,    /* sparc32 block fill (stda) */
} ASIType;

typedef struct {
    ASIType type;     /* access strategy chosen by get_asi() */
    int asi;          /* resolved 8-bit ASI number */
    int mem_idx;      /* MMU index to use for direct accesses */
    MemOp memop;      /* access size, possibly byte-swapped for LE asis */
} DisasASI;
1930 
/* Decode the ASI of a load/store-alternate instruction: resolve the ASI
   number (immediate field or %asi register), choose the MMU index the
   access must use, classify how it can be implemented (ASIType), and
   adjust the MemOp for little-endian asis.  May raise an illegal-insn or
   privilege exception, in which case type is GET_ASI_EXCP.  */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    /* On v9, a register-form access takes the ASI from %asi.  */
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: select the MMU index implied by the ASI.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: classify the access type.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2143 
/*
 * Generate code for a single integer load through an alternate ASI.
 * DST receives the loaded value, ADDR is the guest address, INSN the raw
 * instruction word (used to decode the ASI), MEMOP the size/endianness.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* ASI maps to a plain MMU index: emit an inline load. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            /* NOTE(review): this passes the caller's MEMOP rather than
               da.memop (which may have MO_BSWAP applied for LE ASIs);
               presumably the helper derives endianness from the ASI —
               confirm against helper_ld_asi. */
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always returns 64 bits; truncate to
                   the 32-bit target register width. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2178 
/*
 * Generate code for a single integer store through an alternate ASI.
 * SRC is the value to store, ADDR the guest address, INSN the raw
 * instruction word, MEMOP the size/endianness of the access.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        /* ASI maps to a plain MMU index: emit an inline store. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            /* Round both addresses down to 4-byte boundaries. */
            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            /* NOTE(review): passes the caller's MEMOP, not da.memop —
               see the matching note in gen_ld_asi; confirm the helper
               resolves endianness from the ASI. */
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; zero-extend SRC. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2254 
2255 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2256                          TCGv addr, int insn)
2257 {
2258     DisasASI da = get_asi(dc, insn, MO_TEUL);
2259 
2260     switch (da.type) {
2261     case GET_ASI_EXCP:
2262         break;
2263     case GET_ASI_DIRECT:
2264         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2265         break;
2266     default:
2267         /* ??? Should be DAE_invalid_asi.  */
2268         gen_exception(dc, TT_DATA_ACCESS);
2269         break;
2270     }
2271 }
2272 
2273 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2274                         int insn, int rd)
2275 {
2276     DisasASI da = get_asi(dc, insn, MO_TEUL);
2277     TCGv oldv;
2278 
2279     switch (da.type) {
2280     case GET_ASI_EXCP:
2281         return;
2282     case GET_ASI_DIRECT:
2283         oldv = tcg_temp_new();
2284         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2285                                   da.mem_idx, da.memop | MO_ALIGN);
2286         gen_store_gpr(dc, rd, oldv);
2287         break;
2288     default:
2289         /* ??? Should be DAE_invalid_asi.  */
2290         gen_exception(dc, TT_DATA_ACCESS);
2291         break;
2292     }
2293 }
2294 
/*
 * Generate code for LDSTUBA: atomically load the byte at ADDR into DST
 * and store 0xff to it, via the ASI encoded in INSN.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper-based fallback below is a non-atomic load+store
               pair; it is only safe when no other cpu runs concurrently. */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2330 #endif
2331 
2332 #ifdef TARGET_SPARC64
/*
 * Generate code for a floating-point load through an alternate ASI
 * (ldfa/lddfa/ldqfa).  SIZE is the access width in bytes (4, 8 or 16),
 * RD the destination fp register number.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the first half into a temp so that a fault on the
               second access leaves the low half of the destination
               register pair unmodified. */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Load the 64-byte block as eight consecutive doublewords. */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                /* Helper returns 64 bits; keep only the low 32. */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the DIRECT case: buffer the first half so a fault
                   on the second helper call does not clobber it. */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2441 
/*
 * Generate code for a floating-point store through an alternate ASI
 * (stfa/stdfa/stqfa).  SIZE is the access width in bytes (4, 8 or 16),
 * RD the source fp register number.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Store the 64-byte block as eight consecutive doublewords. */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2524 
/*
 * Generate code for the sparc64 LDDA: load a 64-bit doubleword through an
 * alternate ASI and split it into the register pair %rd (high) / %rd+1 (low).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        return;

    case GET_ASI_DTWINX:
        /* Twin-load: two full 64-bit words, 16-byte aligned. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2586 
/*
 * Generate code for the sparc64 STDA: store the register pair %rd (HI) /
 * %rd+1 (low) as one 64-bit doubleword through an alternate ASI.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;

    case GET_ASI_DTWINX:
        /* Twin-store: two full 64-bit words, 16-byte aligned. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2642 
2643 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2644                          int insn, int rd)
2645 {
2646     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2647     TCGv oldv;
2648 
2649     switch (da.type) {
2650     case GET_ASI_EXCP:
2651         return;
2652     case GET_ASI_DIRECT:
2653         oldv = tcg_temp_new();
2654         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2655                                   da.mem_idx, da.memop | MO_ALIGN);
2656         gen_store_gpr(dc, rd, oldv);
2657         break;
2658     default:
2659         /* ??? Should be DAE_invalid_asi.  */
2660         gen_exception(dc, TT_DATA_ACCESS);
2661         break;
2662     }
2663 }
2664 
2665 #elif !defined(CONFIG_USER_ONLY)
/*
 * Generate code for the sparc32 LDDA: load one 64-bit doubleword through an
 * alternate ASI and split it into the register pair %rd (high) / %rd+1 (low).
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* High word of the doubleword goes to the even register. */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2699 
/*
 * Generate code for the sparc32 STDA: store the register pair %rd (HI) /
 * %rd+1 (low) as one 64-bit doubleword through an alternate ASI.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Build the 64-bit value: HI in the upper half, LO in the lower. */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2745 #endif
2746 
2747 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2748 {
2749     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2750     return gen_load_gpr(dc, rs1);
2751 }
2752 
2753 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2754 {
2755     if (IS_IMM) { /* immediate */
2756         target_long simm = GET_FIELDs(insn, 19, 31);
2757         TCGv t = tcg_temp_new();
2758         tcg_gen_movi_tl(t, simm);
2759         return t;
2760     } else {      /* register */
2761         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2762         return gen_load_gpr(dc, rs2);
2763     }
2764 }
2765 
2766 #ifdef TARGET_SPARC64
2767 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2768 {
2769     TCGv_i32 c32, zero, dst, s1, s2;
2770 
2771     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2772        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2773        the later.  */
2774     c32 = tcg_temp_new_i32();
2775     if (cmp->is_bool) {
2776         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2777     } else {
2778         TCGv_i64 c64 = tcg_temp_new_i64();
2779         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2780         tcg_gen_extrl_i64_i32(c32, c64);
2781     }
2782 
2783     s1 = gen_load_fpr_F(dc, rs);
2784     s2 = gen_load_fpr_F(dc, rd);
2785     dst = gen_dest_fpr_F(dc);
2786     zero = tcg_constant_i32(0);
2787 
2788     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2789 
2790     gen_store_fpr_F(dc, rd, dst);
2791 }
2792 
2793 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2794 {
2795     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2796     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2797                         gen_load_fpr_D(dc, rs),
2798                         gen_load_fpr_D(dc, rd));
2799     gen_store_fpr_D(dc, rd, dst);
2800 }
2801 
2802 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2803 {
2804     int qd = QFPREG(rd);
2805     int qs = QFPREG(rs);
2806 
2807     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2808                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2809     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2810                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2811 
2812     gen_update_fprs_dirty(dc, qd);
2813 }
2814 
2815 #ifndef CONFIG_USER_ONLY
/* Compute r_tsptr = &env->ts[env->tl & MAXTL_MASK], a pointer to the
   trap state for the current trap level. */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding. */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2837 #endif
2838 
/*
 * Generate code for the VIS edge instructions: DST receives the edge mask
 * for a WIDTH-bit (8/16/32) element access spanning S1..S2.  When CC is
 * set, the condition codes are also set as for subcc(s1, s2); LEFT selects
 * the left-edge table variant.  NOTE: S1 and S2 are clobbered (masked to
 * aligned addresses) as a side effect.
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* Set condition codes exactly as subcc(s1, s2) would. */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = per-table edge masks selected by the inputs' low bits. */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the 8-byte-aligned addresses (32-bit when AM is active). */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, lo1, s1, s2);
    tcg_gen_neg_tl(lo1, lo1);
    tcg_gen_or_tl(lo2, lo2, lo1);
    tcg_gen_and_tl(dst, dst, lo2);
}
2934 
2935 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2936 {
2937     TCGv tmp = tcg_temp_new();
2938 
2939     tcg_gen_add_tl(tmp, s1, s2);
2940     tcg_gen_andi_tl(dst, tmp, -8);
2941     if (left) {
2942         tcg_gen_neg_tl(tmp, tmp);
2943     }
2944     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2945 }
2946 
/*
 * Generate code for faligndata:
 *   dst = (s1 << (gsr.align * 8)) | (s2 >> (64 - gsr.align * 8))
 * i.e. extract 8 bytes starting gsr.align bytes into the s1:s2 pair.
 */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align (low 3 bits) converted to a bit count. */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2967 #endif
2968 
/* Jump to the illegal_insn label unless the CPU model has FEATURE. */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Jump to the nfpu_insn label unless the CPU model has FEATURE. */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
2975 
2976 /* before an instruction, dc->pc must be static */
2977 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
2978 {
2979     unsigned int opc, rs1, rs2, rd;
2980     TCGv cpu_src1, cpu_src2;
2981     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
2982     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
2983     target_long simm;
2984 
2985     opc = GET_FIELD(insn, 0, 1);
2986     rd = GET_FIELD(insn, 2, 6);
2987 
2988     switch (opc) {
2989     case 0:                     /* branches/sethi */
2990         {
2991             unsigned int xop = GET_FIELD(insn, 7, 9);
2992             int32_t target;
2993             switch (xop) {
2994 #ifdef TARGET_SPARC64
2995             case 0x1:           /* V9 BPcc */
2996                 {
2997                     int cc;
2998 
2999                     target = GET_FIELD_SP(insn, 0, 18);
3000                     target = sign_extend(target, 19);
3001                     target <<= 2;
3002                     cc = GET_FIELD_SP(insn, 20, 21);
3003                     if (cc == 0)
3004                         do_branch(dc, target, insn, 0);
3005                     else if (cc == 2)
3006                         do_branch(dc, target, insn, 1);
3007                     else
3008                         goto illegal_insn;
3009                     goto jmp_insn;
3010                 }
3011             case 0x3:           /* V9 BPr */
3012                 {
3013                     target = GET_FIELD_SP(insn, 0, 13) |
3014                         (GET_FIELD_SP(insn, 20, 21) << 14);
3015                     target = sign_extend(target, 16);
3016                     target <<= 2;
3017                     cpu_src1 = get_src1(dc, insn);
3018                     do_branch_reg(dc, target, insn, cpu_src1);
3019                     goto jmp_insn;
3020                 }
3021             case 0x5:           /* V9 FBPcc */
3022                 {
3023                     int cc = GET_FIELD_SP(insn, 20, 21);
3024                     if (gen_trap_ifnofpu(dc)) {
3025                         goto jmp_insn;
3026                     }
3027                     target = GET_FIELD_SP(insn, 0, 18);
3028                     target = sign_extend(target, 19);
3029                     target <<= 2;
3030                     do_fbranch(dc, target, insn, cc);
3031                     goto jmp_insn;
3032                 }
3033 #else
3034             case 0x7:           /* CBN+x */
3035                 {
3036                     goto ncp_insn;
3037                 }
3038 #endif
3039             case 0x2:           /* BN+x */
3040                 {
3041                     target = GET_FIELD(insn, 10, 31);
3042                     target = sign_extend(target, 22);
3043                     target <<= 2;
3044                     do_branch(dc, target, insn, 0);
3045                     goto jmp_insn;
3046                 }
3047             case 0x6:           /* FBN+x */
3048                 {
3049                     if (gen_trap_ifnofpu(dc)) {
3050                         goto jmp_insn;
3051                     }
3052                     target = GET_FIELD(insn, 10, 31);
3053                     target = sign_extend(target, 22);
3054                     target <<= 2;
3055                     do_fbranch(dc, target, insn, 0);
3056                     goto jmp_insn;
3057                 }
3058             case 0x4:           /* SETHI */
3059                 /* Special-case %g0 because that's the canonical nop.  */
3060                 if (rd) {
3061                     uint32_t value = GET_FIELD(insn, 10, 31);
3062                     TCGv t = gen_dest_gpr(dc, rd);
3063                     tcg_gen_movi_tl(t, value << 10);
3064                     gen_store_gpr(dc, rd, t);
3065                 }
3066                 break;
3067             case 0x0:           /* UNIMPL */
3068             default:
3069                 goto illegal_insn;
3070             }
3071             break;
3072         }
3073         break;
3074     case 1:                     /*CALL*/
3075         {
3076             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3077             TCGv o7 = gen_dest_gpr(dc, 15);
3078 
3079             tcg_gen_movi_tl(o7, dc->pc);
3080             gen_store_gpr(dc, 15, o7);
3081             target += dc->pc;
3082             gen_mov_pc_npc(dc);
3083 #ifdef TARGET_SPARC64
3084             if (unlikely(AM_CHECK(dc))) {
3085                 target &= 0xffffffffULL;
3086             }
3087 #endif
3088             dc->npc = target;
3089         }
3090         goto jmp_insn;
3091     case 2:                     /* FPU & Logical Operations */
3092         {
3093             unsigned int xop = GET_FIELD(insn, 7, 12);
3094             TCGv cpu_dst = tcg_temp_new();
3095             TCGv cpu_tmp0;
3096 
3097             if (xop == 0x3a) {  /* generate trap */
3098                 int cond = GET_FIELD(insn, 3, 6);
3099                 TCGv_i32 trap;
3100                 TCGLabel *l1 = NULL;
3101                 int mask;
3102 
3103                 if (cond == 0) {
3104                     /* Trap never.  */
3105                     break;
3106                 }
3107 
3108                 save_state(dc);
3109 
3110                 if (cond != 8) {
3111                     /* Conditional trap.  */
3112                     DisasCompare cmp;
3113 #ifdef TARGET_SPARC64
3114                     /* V9 icc/xcc */
3115                     int cc = GET_FIELD_SP(insn, 11, 12);
3116                     if (cc == 0) {
3117                         gen_compare(&cmp, 0, cond, dc);
3118                     } else if (cc == 2) {
3119                         gen_compare(&cmp, 1, cond, dc);
3120                     } else {
3121                         goto illegal_insn;
3122                     }
3123 #else
3124                     gen_compare(&cmp, 0, cond, dc);
3125 #endif
3126                     l1 = gen_new_label();
3127                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3128                                       cmp.c1, cmp.c2, l1);
3129                 }
3130 
3131                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3132                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3133 
3134                 /* Don't use the normal temporaries, as they may well have
3135                    gone out of scope with the branch above.  While we're
3136                    doing that we might as well pre-truncate to 32-bit.  */
3137                 trap = tcg_temp_new_i32();
3138 
3139                 rs1 = GET_FIELD_SP(insn, 14, 18);
3140                 if (IS_IMM) {
3141                     rs2 = GET_FIELD_SP(insn, 0, 7);
3142                     if (rs1 == 0) {
3143                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3144                         /* Signal that the trap value is fully constant.  */
3145                         mask = 0;
3146                     } else {
3147                         TCGv t1 = gen_load_gpr(dc, rs1);
3148                         tcg_gen_trunc_tl_i32(trap, t1);
3149                         tcg_gen_addi_i32(trap, trap, rs2);
3150                     }
3151                 } else {
3152                     TCGv t1, t2;
3153                     rs2 = GET_FIELD_SP(insn, 0, 4);
3154                     t1 = gen_load_gpr(dc, rs1);
3155                     t2 = gen_load_gpr(dc, rs2);
3156                     tcg_gen_add_tl(t1, t1, t2);
3157                     tcg_gen_trunc_tl_i32(trap, t1);
3158                 }
3159                 if (mask != 0) {
3160                     tcg_gen_andi_i32(trap, trap, mask);
3161                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3162                 }
3163 
3164                 gen_helper_raise_exception(cpu_env, trap);
3165 
3166                 if (cond == 8) {
3167                     /* An unconditional trap ends the TB.  */
3168                     dc->base.is_jmp = DISAS_NORETURN;
3169                     goto jmp_insn;
3170                 } else {
3171                     /* A conditional trap falls through to the next insn.  */
3172                     gen_set_label(l1);
3173                     break;
3174                 }
3175             } else if (xop == 0x28) {
3176                 rs1 = GET_FIELD(insn, 13, 17);
3177                 switch(rs1) {
3178                 case 0: /* rdy */
3179 #ifndef TARGET_SPARC64
3180                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3181                                        manual, rdy on the microSPARC
3182                                        II */
3183                 case 0x0f:          /* stbar in the SPARCv8 manual,
3184                                        rdy on the microSPARC II */
3185                 case 0x10 ... 0x1f: /* implementation-dependent in the
3186                                        SPARCv8 manual, rdy on the
3187                                        microSPARC II */
3188                     /* Read Asr17 */
3189                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3190                         TCGv t = gen_dest_gpr(dc, rd);
3191                         /* Read Asr17 for a Leon3 monoprocessor */
3192                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3193                         gen_store_gpr(dc, rd, t);
3194                         break;
3195                     }
3196 #endif
3197                     gen_store_gpr(dc, rd, cpu_y);
3198                     break;
3199 #ifdef TARGET_SPARC64
3200                 case 0x2: /* V9 rdccr */
3201                     update_psr(dc);
3202                     gen_helper_rdccr(cpu_dst, cpu_env);
3203                     gen_store_gpr(dc, rd, cpu_dst);
3204                     break;
3205                 case 0x3: /* V9 rdasi */
3206                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3207                     gen_store_gpr(dc, rd, cpu_dst);
3208                     break;
3209                 case 0x4: /* V9 rdtick */
3210                     {
3211                         TCGv_ptr r_tickptr;
3212                         TCGv_i32 r_const;
3213 
3214                         r_tickptr = tcg_temp_new_ptr();
3215                         r_const = tcg_constant_i32(dc->mem_idx);
3216                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3217                                        offsetof(CPUSPARCState, tick));
3218                         if (translator_io_start(&dc->base)) {
3219                             dc->base.is_jmp = DISAS_EXIT;
3220                         }
3221                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3222                                                   r_const);
3223                         gen_store_gpr(dc, rd, cpu_dst);
3224                     }
3225                     break;
3226                 case 0x5: /* V9 rdpc */
3227                     {
3228                         TCGv t = gen_dest_gpr(dc, rd);
3229                         if (unlikely(AM_CHECK(dc))) {
3230                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3231                         } else {
3232                             tcg_gen_movi_tl(t, dc->pc);
3233                         }
3234                         gen_store_gpr(dc, rd, t);
3235                     }
3236                     break;
3237                 case 0x6: /* V9 rdfprs */
3238                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3239                     gen_store_gpr(dc, rd, cpu_dst);
3240                     break;
3241                 case 0xf: /* V9 membar */
3242                     break; /* no effect */
3243                 case 0x13: /* Graphics Status */
3244                     if (gen_trap_ifnofpu(dc)) {
3245                         goto jmp_insn;
3246                     }
3247                     gen_store_gpr(dc, rd, cpu_gsr);
3248                     break;
3249                 case 0x16: /* Softint */
3250                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3251                                      offsetof(CPUSPARCState, softint));
3252                     gen_store_gpr(dc, rd, cpu_dst);
3253                     break;
3254                 case 0x17: /* Tick compare */
3255                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3256                     break;
3257                 case 0x18: /* System tick */
3258                     {
3259                         TCGv_ptr r_tickptr;
3260                         TCGv_i32 r_const;
3261 
3262                         r_tickptr = tcg_temp_new_ptr();
3263                         r_const = tcg_constant_i32(dc->mem_idx);
3264                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3265                                        offsetof(CPUSPARCState, stick));
3266                         if (translator_io_start(&dc->base)) {
3267                             dc->base.is_jmp = DISAS_EXIT;
3268                         }
3269                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3270                                                   r_const);
3271                         gen_store_gpr(dc, rd, cpu_dst);
3272                     }
3273                     break;
3274                 case 0x19: /* System tick compare */
3275                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3276                     break;
3277                 case 0x1a: /* UltraSPARC-T1 Strand status */
3278                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3279                      * this ASR as impl. dep
3280                      */
3281                     CHECK_IU_FEATURE(dc, HYPV);
3282                     {
3283                         TCGv t = gen_dest_gpr(dc, rd);
3284                         tcg_gen_movi_tl(t, 1UL);
3285                         gen_store_gpr(dc, rd, t);
3286                     }
3287                     break;
3288                 case 0x10: /* Performance Control */
3289                 case 0x11: /* Performance Instrumentation Counter */
3290                 case 0x12: /* Dispatch Control */
3291                 case 0x14: /* Softint set, WO */
3292                 case 0x15: /* Softint clear, WO */
3293 #endif
3294                 default:
3295                     goto illegal_insn;
3296                 }
3297 #if !defined(CONFIG_USER_ONLY)
3298             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3299 #ifndef TARGET_SPARC64
3300                 if (!supervisor(dc)) {
3301                     goto priv_insn;
3302                 }
3303                 update_psr(dc);
3304                 gen_helper_rdpsr(cpu_dst, cpu_env);
3305 #else
3306                 CHECK_IU_FEATURE(dc, HYPV);
3307                 if (!hypervisor(dc))
3308                     goto priv_insn;
3309                 rs1 = GET_FIELD(insn, 13, 17);
3310                 switch (rs1) {
3311                 case 0: // hpstate
3312                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3313                                    offsetof(CPUSPARCState, hpstate));
3314                     break;
3315                 case 1: // htstate
3316                     // gen_op_rdhtstate();
3317                     break;
3318                 case 3: // hintp
3319                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3320                     break;
3321                 case 5: // htba
3322                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3323                     break;
3324                 case 6: // hver
3325                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3326                     break;
3327                 case 31: // hstick_cmpr
3328                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3329                     break;
3330                 default:
3331                     goto illegal_insn;
3332                 }
3333 #endif
3334                 gen_store_gpr(dc, rd, cpu_dst);
3335                 break;
3336             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3337                 if (!supervisor(dc)) {
3338                     goto priv_insn;
3339                 }
3340                 cpu_tmp0 = tcg_temp_new();
3341 #ifdef TARGET_SPARC64
3342                 rs1 = GET_FIELD(insn, 13, 17);
3343                 switch (rs1) {
3344                 case 0: // tpc
3345                     {
3346                         TCGv_ptr r_tsptr;
3347 
3348                         r_tsptr = tcg_temp_new_ptr();
3349                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3350                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3351                                       offsetof(trap_state, tpc));
3352                     }
3353                     break;
3354                 case 1: // tnpc
3355                     {
3356                         TCGv_ptr r_tsptr;
3357 
3358                         r_tsptr = tcg_temp_new_ptr();
3359                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3360                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3361                                       offsetof(trap_state, tnpc));
3362                     }
3363                     break;
3364                 case 2: // tstate
3365                     {
3366                         TCGv_ptr r_tsptr;
3367 
3368                         r_tsptr = tcg_temp_new_ptr();
3369                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3370                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3371                                       offsetof(trap_state, tstate));
3372                     }
3373                     break;
3374                 case 3: // tt
3375                     {
3376                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3377 
3378                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3379                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3380                                          offsetof(trap_state, tt));
3381                     }
3382                     break;
3383                 case 4: // tick
3384                     {
3385                         TCGv_ptr r_tickptr;
3386                         TCGv_i32 r_const;
3387 
3388                         r_tickptr = tcg_temp_new_ptr();
3389                         r_const = tcg_constant_i32(dc->mem_idx);
3390                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3391                                        offsetof(CPUSPARCState, tick));
3392                         if (translator_io_start(&dc->base)) {
3393                             dc->base.is_jmp = DISAS_EXIT;
3394                         }
3395                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3396                                                   r_tickptr, r_const);
3397                     }
3398                     break;
3399                 case 5: // tba
3400                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3401                     break;
3402                 case 6: // pstate
3403                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3404                                      offsetof(CPUSPARCState, pstate));
3405                     break;
3406                 case 7: // tl
3407                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3408                                      offsetof(CPUSPARCState, tl));
3409                     break;
3410                 case 8: // pil
3411                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3412                                      offsetof(CPUSPARCState, psrpil));
3413                     break;
3414                 case 9: // cwp
3415                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3416                     break;
3417                 case 10: // cansave
3418                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3419                                      offsetof(CPUSPARCState, cansave));
3420                     break;
3421                 case 11: // canrestore
3422                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3423                                      offsetof(CPUSPARCState, canrestore));
3424                     break;
3425                 case 12: // cleanwin
3426                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3427                                      offsetof(CPUSPARCState, cleanwin));
3428                     break;
3429                 case 13: // otherwin
3430                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3431                                      offsetof(CPUSPARCState, otherwin));
3432                     break;
3433                 case 14: // wstate
3434                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3435                                      offsetof(CPUSPARCState, wstate));
3436                     break;
3437                 case 16: // UA2005 gl
3438                     CHECK_IU_FEATURE(dc, GL);
3439                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3440                                      offsetof(CPUSPARCState, gl));
3441                     break;
3442                 case 26: // UA2005 strand status
3443                     CHECK_IU_FEATURE(dc, HYPV);
3444                     if (!hypervisor(dc))
3445                         goto priv_insn;
3446                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3447                     break;
3448                 case 31: // ver
3449                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3450                     break;
3451                 case 15: // fq
3452                 default:
3453                     goto illegal_insn;
3454                 }
3455 #else
3456                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3457 #endif
3458                 gen_store_gpr(dc, rd, cpu_tmp0);
3459                 break;
3460 #endif
3461 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3462             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3463 #ifdef TARGET_SPARC64
3464                 gen_helper_flushw(cpu_env);
3465 #else
3466                 if (!supervisor(dc))
3467                     goto priv_insn;
3468                 gen_store_gpr(dc, rd, cpu_tbr);
3469 #endif
3470                 break;
3471 #endif
3472             } else if (xop == 0x34) {   /* FPU Operations */
3473                 if (gen_trap_ifnofpu(dc)) {
3474                     goto jmp_insn;
3475                 }
3476                 gen_op_clear_ieee_excp_and_FTT();
3477                 rs1 = GET_FIELD(insn, 13, 17);
3478                 rs2 = GET_FIELD(insn, 27, 31);
3479                 xop = GET_FIELD(insn, 18, 26);
3480 
3481                 switch (xop) {
3482                 case 0x1: /* fmovs */
3483                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3484                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3485                     break;
3486                 case 0x5: /* fnegs */
3487                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3488                     break;
3489                 case 0x9: /* fabss */
3490                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3491                     break;
3492                 case 0x29: /* fsqrts */
3493                     CHECK_FPU_FEATURE(dc, FSQRT);
3494                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3495                     break;
3496                 case 0x2a: /* fsqrtd */
3497                     CHECK_FPU_FEATURE(dc, FSQRT);
3498                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3499                     break;
3500                 case 0x2b: /* fsqrtq */
3501                     CHECK_FPU_FEATURE(dc, FLOAT128);
3502                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3503                     break;
3504                 case 0x41: /* fadds */
3505                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3506                     break;
3507                 case 0x42: /* faddd */
3508                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3509                     break;
3510                 case 0x43: /* faddq */
3511                     CHECK_FPU_FEATURE(dc, FLOAT128);
3512                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3513                     break;
3514                 case 0x45: /* fsubs */
3515                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3516                     break;
3517                 case 0x46: /* fsubd */
3518                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3519                     break;
3520                 case 0x47: /* fsubq */
3521                     CHECK_FPU_FEATURE(dc, FLOAT128);
3522                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3523                     break;
3524                 case 0x49: /* fmuls */
3525                     CHECK_FPU_FEATURE(dc, FMUL);
3526                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3527                     break;
3528                 case 0x4a: /* fmuld */
3529                     CHECK_FPU_FEATURE(dc, FMUL);
3530                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3531                     break;
3532                 case 0x4b: /* fmulq */
3533                     CHECK_FPU_FEATURE(dc, FLOAT128);
3534                     CHECK_FPU_FEATURE(dc, FMUL);
3535                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3536                     break;
3537                 case 0x4d: /* fdivs */
3538                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3539                     break;
3540                 case 0x4e: /* fdivd */
3541                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3542                     break;
3543                 case 0x4f: /* fdivq */
3544                     CHECK_FPU_FEATURE(dc, FLOAT128);
3545                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3546                     break;
3547                 case 0x69: /* fsmuld */
3548                     CHECK_FPU_FEATURE(dc, FSMULD);
3549                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3550                     break;
3551                 case 0x6e: /* fdmulq */
3552                     CHECK_FPU_FEATURE(dc, FLOAT128);
3553                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3554                     break;
3555                 case 0xc4: /* fitos */
3556                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3557                     break;
3558                 case 0xc6: /* fdtos */
3559                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3560                     break;
3561                 case 0xc7: /* fqtos */
3562                     CHECK_FPU_FEATURE(dc, FLOAT128);
3563                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3564                     break;
3565                 case 0xc8: /* fitod */
3566                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3567                     break;
3568                 case 0xc9: /* fstod */
3569                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3570                     break;
3571                 case 0xcb: /* fqtod */
3572                     CHECK_FPU_FEATURE(dc, FLOAT128);
3573                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3574                     break;
3575                 case 0xcc: /* fitoq */
3576                     CHECK_FPU_FEATURE(dc, FLOAT128);
3577                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3578                     break;
3579                 case 0xcd: /* fstoq */
3580                     CHECK_FPU_FEATURE(dc, FLOAT128);
3581                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3582                     break;
3583                 case 0xce: /* fdtoq */
3584                     CHECK_FPU_FEATURE(dc, FLOAT128);
3585                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3586                     break;
3587                 case 0xd1: /* fstoi */
3588                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3589                     break;
3590                 case 0xd2: /* fdtoi */
3591                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3592                     break;
3593                 case 0xd3: /* fqtoi */
3594                     CHECK_FPU_FEATURE(dc, FLOAT128);
3595                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3596                     break;
3597 #ifdef TARGET_SPARC64
3598                 case 0x2: /* V9 fmovd */
3599                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3600                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3601                     break;
3602                 case 0x3: /* V9 fmovq */
3603                     CHECK_FPU_FEATURE(dc, FLOAT128);
3604                     gen_move_Q(dc, rd, rs2);
3605                     break;
3606                 case 0x6: /* V9 fnegd */
3607                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3608                     break;
3609                 case 0x7: /* V9 fnegq */
3610                     CHECK_FPU_FEATURE(dc, FLOAT128);
3611                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3612                     break;
3613                 case 0xa: /* V9 fabsd */
3614                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3615                     break;
3616                 case 0xb: /* V9 fabsq */
3617                     CHECK_FPU_FEATURE(dc, FLOAT128);
3618                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3619                     break;
3620                 case 0x81: /* V9 fstox */
3621                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3622                     break;
3623                 case 0x82: /* V9 fdtox */
3624                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3625                     break;
3626                 case 0x83: /* V9 fqtox */
3627                     CHECK_FPU_FEATURE(dc, FLOAT128);
3628                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3629                     break;
3630                 case 0x84: /* V9 fxtos */
3631                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3632                     break;
3633                 case 0x88: /* V9 fxtod */
3634                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3635                     break;
3636                 case 0x8c: /* V9 fxtoq */
3637                     CHECK_FPU_FEATURE(dc, FLOAT128);
3638                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3639                     break;
3640 #endif
3641                 default:
3642                     goto illegal_insn;
3643                 }
3644             } else if (xop == 0x35) {   /* FPU Operations */
3645 #ifdef TARGET_SPARC64
3646                 int cond;
3647 #endif
3648                 if (gen_trap_ifnofpu(dc)) {
3649                     goto jmp_insn;
3650                 }
3651                 gen_op_clear_ieee_excp_and_FTT();
3652                 rs1 = GET_FIELD(insn, 13, 17);
3653                 rs2 = GET_FIELD(insn, 27, 31);
3654                 xop = GET_FIELD(insn, 18, 26);
3655 
3656 #ifdef TARGET_SPARC64
3657 #define FMOVR(sz)                                                  \
3658                 do {                                               \
3659                     DisasCompare cmp;                              \
3660                     cond = GET_FIELD_SP(insn, 10, 12);             \
3661                     cpu_src1 = get_src1(dc, insn);                 \
3662                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3663                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3664                 } while (0)
3665 
3666                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3667                     FMOVR(s);
3668                     break;
3669                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3670                     FMOVR(d);
3671                     break;
3672                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3673                     CHECK_FPU_FEATURE(dc, FLOAT128);
3674                     FMOVR(q);
3675                     break;
3676                 }
3677 #undef FMOVR
3678 #endif
3679                 switch (xop) {
3680 #ifdef TARGET_SPARC64
3681 #define FMOVCC(fcc, sz)                                                 \
3682                     do {                                                \
3683                         DisasCompare cmp;                               \
3684                         cond = GET_FIELD_SP(insn, 14, 17);              \
3685                         gen_fcompare(&cmp, fcc, cond);                  \
3686                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3687                     } while (0)
3688 
3689                     case 0x001: /* V9 fmovscc %fcc0 */
3690                         FMOVCC(0, s);
3691                         break;
3692                     case 0x002: /* V9 fmovdcc %fcc0 */
3693                         FMOVCC(0, d);
3694                         break;
3695                     case 0x003: /* V9 fmovqcc %fcc0 */
3696                         CHECK_FPU_FEATURE(dc, FLOAT128);
3697                         FMOVCC(0, q);
3698                         break;
3699                     case 0x041: /* V9 fmovscc %fcc1 */
3700                         FMOVCC(1, s);
3701                         break;
3702                     case 0x042: /* V9 fmovdcc %fcc1 */
3703                         FMOVCC(1, d);
3704                         break;
3705                     case 0x043: /* V9 fmovqcc %fcc1 */
3706                         CHECK_FPU_FEATURE(dc, FLOAT128);
3707                         FMOVCC(1, q);
3708                         break;
3709                     case 0x081: /* V9 fmovscc %fcc2 */
3710                         FMOVCC(2, s);
3711                         break;
3712                     case 0x082: /* V9 fmovdcc %fcc2 */
3713                         FMOVCC(2, d);
3714                         break;
3715                     case 0x083: /* V9 fmovqcc %fcc2 */
3716                         CHECK_FPU_FEATURE(dc, FLOAT128);
3717                         FMOVCC(2, q);
3718                         break;
3719                     case 0x0c1: /* V9 fmovscc %fcc3 */
3720                         FMOVCC(3, s);
3721                         break;
3722                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3723                         FMOVCC(3, d);
3724                         break;
3725                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3726                         CHECK_FPU_FEATURE(dc, FLOAT128);
3727                         FMOVCC(3, q);
3728                         break;
3729 #undef FMOVCC
3730 #define FMOVCC(xcc, sz)                                                 \
3731                     do {                                                \
3732                         DisasCompare cmp;                               \
3733                         cond = GET_FIELD_SP(insn, 14, 17);              \
3734                         gen_compare(&cmp, xcc, cond, dc);               \
3735                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3736                     } while (0)
3737 
3738                     case 0x101: /* V9 fmovscc %icc */
3739                         FMOVCC(0, s);
3740                         break;
3741                     case 0x102: /* V9 fmovdcc %icc */
3742                         FMOVCC(0, d);
3743                         break;
3744                     case 0x103: /* V9 fmovqcc %icc */
3745                         CHECK_FPU_FEATURE(dc, FLOAT128);
3746                         FMOVCC(0, q);
3747                         break;
3748                     case 0x181: /* V9 fmovscc %xcc */
3749                         FMOVCC(1, s);
3750                         break;
3751                     case 0x182: /* V9 fmovdcc %xcc */
3752                         FMOVCC(1, d);
3753                         break;
3754                     case 0x183: /* V9 fmovqcc %xcc */
3755                         CHECK_FPU_FEATURE(dc, FLOAT128);
3756                         FMOVCC(1, q);
3757                         break;
3758 #undef FMOVCC
3759 #endif
3760                     case 0x51: /* fcmps, V9 %fcc */
3761                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3762                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3763                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3764                         break;
3765                     case 0x52: /* fcmpd, V9 %fcc */
3766                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3767                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3768                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3769                         break;
3770                     case 0x53: /* fcmpq, V9 %fcc */
3771                         CHECK_FPU_FEATURE(dc, FLOAT128);
3772                         gen_op_load_fpr_QT0(QFPREG(rs1));
3773                         gen_op_load_fpr_QT1(QFPREG(rs2));
3774                         gen_op_fcmpq(rd & 3);
3775                         break;
3776                     case 0x55: /* fcmpes, V9 %fcc */
3777                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3778                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3779                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3780                         break;
3781                     case 0x56: /* fcmped, V9 %fcc */
3782                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3783                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3784                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3785                         break;
3786                     case 0x57: /* fcmpeq, V9 %fcc */
3787                         CHECK_FPU_FEATURE(dc, FLOAT128);
3788                         gen_op_load_fpr_QT0(QFPREG(rs1));
3789                         gen_op_load_fpr_QT1(QFPREG(rs2));
3790                         gen_op_fcmpeq(rd & 3);
3791                         break;
3792                     default:
3793                         goto illegal_insn;
3794                 }
3795             } else if (xop == 0x2) {
3796                 TCGv dst = gen_dest_gpr(dc, rd);
3797                 rs1 = GET_FIELD(insn, 13, 17);
3798                 if (rs1 == 0) {
3799                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3800                     if (IS_IMM) {       /* immediate */
3801                         simm = GET_FIELDs(insn, 19, 31);
3802                         tcg_gen_movi_tl(dst, simm);
3803                         gen_store_gpr(dc, rd, dst);
3804                     } else {            /* register */
3805                         rs2 = GET_FIELD(insn, 27, 31);
3806                         if (rs2 == 0) {
3807                             tcg_gen_movi_tl(dst, 0);
3808                             gen_store_gpr(dc, rd, dst);
3809                         } else {
3810                             cpu_src2 = gen_load_gpr(dc, rs2);
3811                             gen_store_gpr(dc, rd, cpu_src2);
3812                         }
3813                     }
3814                 } else {
3815                     cpu_src1 = get_src1(dc, insn);
3816                     if (IS_IMM) {       /* immediate */
3817                         simm = GET_FIELDs(insn, 19, 31);
3818                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3819                         gen_store_gpr(dc, rd, dst);
3820                     } else {            /* register */
3821                         rs2 = GET_FIELD(insn, 27, 31);
3822                         if (rs2 == 0) {
3823                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3824                             gen_store_gpr(dc, rd, cpu_src1);
3825                         } else {
3826                             cpu_src2 = gen_load_gpr(dc, rs2);
3827                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3828                             gen_store_gpr(dc, rd, dst);
3829                         }
3830                     }
3831                 }
3832 #ifdef TARGET_SPARC64
3833             } else if (xop == 0x25) { /* sll, V9 sllx */
3834                 cpu_src1 = get_src1(dc, insn);
3835                 if (IS_IMM) {   /* immediate */
3836                     simm = GET_FIELDs(insn, 20, 31);
3837                     if (insn & (1 << 12)) {
3838                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3839                     } else {
3840                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3841                     }
3842                 } else {                /* register */
3843                     rs2 = GET_FIELD(insn, 27, 31);
3844                     cpu_src2 = gen_load_gpr(dc, rs2);
3845                     cpu_tmp0 = tcg_temp_new();
3846                     if (insn & (1 << 12)) {
3847                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3848                     } else {
3849                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3850                     }
3851                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3852                 }
3853                 gen_store_gpr(dc, rd, cpu_dst);
3854             } else if (xop == 0x26) { /* srl, V9 srlx */
3855                 cpu_src1 = get_src1(dc, insn);
3856                 if (IS_IMM) {   /* immediate */
3857                     simm = GET_FIELDs(insn, 20, 31);
3858                     if (insn & (1 << 12)) {
3859                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3860                     } else {
3861                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3862                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3863                     }
3864                 } else {                /* register */
3865                     rs2 = GET_FIELD(insn, 27, 31);
3866                     cpu_src2 = gen_load_gpr(dc, rs2);
3867                     cpu_tmp0 = tcg_temp_new();
3868                     if (insn & (1 << 12)) {
3869                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3870                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3871                     } else {
3872                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3873                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3874                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3875                     }
3876                 }
3877                 gen_store_gpr(dc, rd, cpu_dst);
3878             } else if (xop == 0x27) { /* sra, V9 srax */
3879                 cpu_src1 = get_src1(dc, insn);
3880                 if (IS_IMM) {   /* immediate */
3881                     simm = GET_FIELDs(insn, 20, 31);
3882                     if (insn & (1 << 12)) {
3883                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3884                     } else {
3885                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3886                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3887                     }
3888                 } else {                /* register */
3889                     rs2 = GET_FIELD(insn, 27, 31);
3890                     cpu_src2 = gen_load_gpr(dc, rs2);
3891                     cpu_tmp0 = tcg_temp_new();
3892                     if (insn & (1 << 12)) {
3893                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3894                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3895                     } else {
3896                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3897                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3898                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3899                     }
3900                 }
3901                 gen_store_gpr(dc, rd, cpu_dst);
3902 #endif
3903             } else if (xop < 0x36) {
3904                 if (xop < 0x20) {
3905                     cpu_src1 = get_src1(dc, insn);
3906                     cpu_src2 = get_src2(dc, insn);
3907                     switch (xop & ~0x10) {
3908                     case 0x0: /* add */
3909                         if (xop & 0x10) {
3910                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3911                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3912                             dc->cc_op = CC_OP_ADD;
3913                         } else {
3914                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3915                         }
3916                         break;
3917                     case 0x1: /* and */
3918                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3919                         if (xop & 0x10) {
3920                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3921                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3922                             dc->cc_op = CC_OP_LOGIC;
3923                         }
3924                         break;
3925                     case 0x2: /* or */
3926                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3927                         if (xop & 0x10) {
3928                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3929                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3930                             dc->cc_op = CC_OP_LOGIC;
3931                         }
3932                         break;
3933                     case 0x3: /* xor */
3934                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3935                         if (xop & 0x10) {
3936                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3937                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3938                             dc->cc_op = CC_OP_LOGIC;
3939                         }
3940                         break;
3941                     case 0x4: /* sub */
3942                         if (xop & 0x10) {
3943                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3944                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3945                             dc->cc_op = CC_OP_SUB;
3946                         } else {
3947                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3948                         }
3949                         break;
3950                     case 0x5: /* andn */
3951                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3952                         if (xop & 0x10) {
3953                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3954                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3955                             dc->cc_op = CC_OP_LOGIC;
3956                         }
3957                         break;
3958                     case 0x6: /* orn */
3959                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3960                         if (xop & 0x10) {
3961                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3962                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3963                             dc->cc_op = CC_OP_LOGIC;
3964                         }
3965                         break;
3966                     case 0x7: /* xorn */
3967                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3968                         if (xop & 0x10) {
3969                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3970                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3971                             dc->cc_op = CC_OP_LOGIC;
3972                         }
3973                         break;
3974                     case 0x8: /* addx, V9 addc */
3975                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3976                                         (xop & 0x10));
3977                         break;
3978 #ifdef TARGET_SPARC64
3979                     case 0x9: /* V9 mulx */
3980                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3981                         break;
3982 #endif
3983                     case 0xa: /* umul */
3984                         CHECK_IU_FEATURE(dc, MUL);
3985                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3986                         if (xop & 0x10) {
3987                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3988                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3989                             dc->cc_op = CC_OP_LOGIC;
3990                         }
3991                         break;
3992                     case 0xb: /* smul */
3993                         CHECK_IU_FEATURE(dc, MUL);
3994                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3995                         if (xop & 0x10) {
3996                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3997                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3998                             dc->cc_op = CC_OP_LOGIC;
3999                         }
4000                         break;
4001                     case 0xc: /* subx, V9 subc */
4002                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4003                                         (xop & 0x10));
4004                         break;
4005 #ifdef TARGET_SPARC64
4006                     case 0xd: /* V9 udivx */
4007                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4008                         break;
4009 #endif
4010                     case 0xe: /* udiv */
4011                         CHECK_IU_FEATURE(dc, DIV);
4012                         if (xop & 0x10) {
4013                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4014                                                cpu_src2);
4015                             dc->cc_op = CC_OP_DIV;
4016                         } else {
4017                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4018                                             cpu_src2);
4019                         }
4020                         break;
4021                     case 0xf: /* sdiv */
4022                         CHECK_IU_FEATURE(dc, DIV);
4023                         if (xop & 0x10) {
4024                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4025                                                cpu_src2);
4026                             dc->cc_op = CC_OP_DIV;
4027                         } else {
4028                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4029                                             cpu_src2);
4030                         }
4031                         break;
4032                     default:
4033                         goto illegal_insn;
4034                     }
4035                     gen_store_gpr(dc, rd, cpu_dst);
4036                 } else {
4037                     cpu_src1 = get_src1(dc, insn);
4038                     cpu_src2 = get_src2(dc, insn);
4039                     switch (xop) {
4040                     case 0x20: /* taddcc */
4041                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4042                         gen_store_gpr(dc, rd, cpu_dst);
4043                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4044                         dc->cc_op = CC_OP_TADD;
4045                         break;
4046                     case 0x21: /* tsubcc */
4047                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4048                         gen_store_gpr(dc, rd, cpu_dst);
4049                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4050                         dc->cc_op = CC_OP_TSUB;
4051                         break;
4052                     case 0x22: /* taddcctv */
4053                         gen_helper_taddcctv(cpu_dst, cpu_env,
4054                                             cpu_src1, cpu_src2);
4055                         gen_store_gpr(dc, rd, cpu_dst);
4056                         dc->cc_op = CC_OP_TADDTV;
4057                         break;
4058                     case 0x23: /* tsubcctv */
4059                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4060                                             cpu_src1, cpu_src2);
4061                         gen_store_gpr(dc, rd, cpu_dst);
4062                         dc->cc_op = CC_OP_TSUBTV;
4063                         break;
4064                     case 0x24: /* mulscc */
4065                         update_psr(dc);
4066                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4067                         gen_store_gpr(dc, rd, cpu_dst);
4068                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4069                         dc->cc_op = CC_OP_ADD;
4070                         break;
4071 #ifndef TARGET_SPARC64
4072                     case 0x25:  /* sll */
4073                         if (IS_IMM) { /* immediate */
4074                             simm = GET_FIELDs(insn, 20, 31);
4075                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4076                         } else { /* register */
4077                             cpu_tmp0 = tcg_temp_new();
4078                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4079                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4080                         }
4081                         gen_store_gpr(dc, rd, cpu_dst);
4082                         break;
4083                     case 0x26:  /* srl */
4084                         if (IS_IMM) { /* immediate */
4085                             simm = GET_FIELDs(insn, 20, 31);
4086                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4087                         } else { /* register */
4088                             cpu_tmp0 = tcg_temp_new();
4089                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4090                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4091                         }
4092                         gen_store_gpr(dc, rd, cpu_dst);
4093                         break;
4094                     case 0x27:  /* sra */
4095                         if (IS_IMM) { /* immediate */
4096                             simm = GET_FIELDs(insn, 20, 31);
4097                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4098                         } else { /* register */
4099                             cpu_tmp0 = tcg_temp_new();
4100                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4101                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4102                         }
4103                         gen_store_gpr(dc, rd, cpu_dst);
4104                         break;
4105 #endif
4106                     case 0x30:
4107                         {
4108                             cpu_tmp0 = tcg_temp_new();
4109                             switch(rd) {
4110                             case 0: /* wry */
4111                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4112                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4113                                 break;
4114 #ifndef TARGET_SPARC64
4115                             case 0x01 ... 0x0f: /* undefined in the
4116                                                    SPARCv8 manual, nop
4117                                                    on the microSPARC
4118                                                    II */
4119                             case 0x10 ... 0x1f: /* implementation-dependent
4120                                                    in the SPARCv8
4121                                                    manual, nop on the
4122                                                    microSPARC II */
4123                                 if ((rd == 0x13) && (dc->def->features &
4124                                                      CPU_FEATURE_POWERDOWN)) {
4125                                     /* LEON3 power-down */
4126                                     save_state(dc);
4127                                     gen_helper_power_down(cpu_env);
4128                                 }
4129                                 break;
4130 #else
4131                             case 0x2: /* V9 wrccr */
4132                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4133                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
4134                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4135                                 dc->cc_op = CC_OP_FLAGS;
4136                                 break;
4137                             case 0x3: /* V9 wrasi */
4138                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4139                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4140                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4141                                                 offsetof(CPUSPARCState, asi));
4142                                 /* End TB to notice changed ASI.  */
4143                                 save_state(dc);
4144                                 gen_op_next_insn();
4145                                 tcg_gen_exit_tb(NULL, 0);
4146                                 dc->base.is_jmp = DISAS_NORETURN;
4147                                 break;
4148                             case 0x6: /* V9 wrfprs */
4149                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4150                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4151                                 dc->fprs_dirty = 0;
4152                                 save_state(dc);
4153                                 gen_op_next_insn();
4154                                 tcg_gen_exit_tb(NULL, 0);
4155                                 dc->base.is_jmp = DISAS_NORETURN;
4156                                 break;
4157                             case 0xf: /* V9 sir, nop if user */
4158 #if !defined(CONFIG_USER_ONLY)
4159                                 if (supervisor(dc)) {
4160                                     ; // XXX
4161                                 }
4162 #endif
4163                                 break;
4164                             case 0x13: /* Graphics Status */
4165                                 if (gen_trap_ifnofpu(dc)) {
4166                                     goto jmp_insn;
4167                                 }
4168                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4169                                 break;
4170                             case 0x14: /* Softint set */
4171                                 if (!supervisor(dc))
4172                                     goto illegal_insn;
4173                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4174                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4175                                 break;
4176                             case 0x15: /* Softint clear */
4177                                 if (!supervisor(dc))
4178                                     goto illegal_insn;
4179                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4180                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
4181                                 break;
4182                             case 0x16: /* Softint write */
4183                                 if (!supervisor(dc))
4184                                     goto illegal_insn;
4185                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4186                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
4187                                 break;
4188                             case 0x17: /* Tick compare */
4189 #if !defined(CONFIG_USER_ONLY)
4190                                 if (!supervisor(dc))
4191                                     goto illegal_insn;
4192 #endif
4193                                 {
4194                                     TCGv_ptr r_tickptr;
4195 
4196                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4197                                                    cpu_src2);
4198                                     r_tickptr = tcg_temp_new_ptr();
4199                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4200                                                    offsetof(CPUSPARCState, tick));
4201                                     translator_io_start(&dc->base);
4202                                     gen_helper_tick_set_limit(r_tickptr,
4203                                                               cpu_tick_cmpr);
4204                                     /* End TB to handle timer interrupt */
4205                                     dc->base.is_jmp = DISAS_EXIT;
4206                                 }
4207                                 break;
4208                             case 0x18: /* System tick */
4209 #if !defined(CONFIG_USER_ONLY)
4210                                 if (!supervisor(dc))
4211                                     goto illegal_insn;
4212 #endif
4213                                 {
4214                                     TCGv_ptr r_tickptr;
4215 
4216                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4217                                                    cpu_src2);
4218                                     r_tickptr = tcg_temp_new_ptr();
4219                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4220                                                    offsetof(CPUSPARCState, stick));
4221                                     translator_io_start(&dc->base);
4222                                     gen_helper_tick_set_count(r_tickptr,
4223                                                               cpu_tmp0);
4224                                     /* End TB to handle timer interrupt */
4225                                     dc->base.is_jmp = DISAS_EXIT;
4226                                 }
4227                                 break;
4228                             case 0x19: /* System tick compare */
4229 #if !defined(CONFIG_USER_ONLY)
4230                                 if (!supervisor(dc))
4231                                     goto illegal_insn;
4232 #endif
4233                                 {
4234                                     TCGv_ptr r_tickptr;
4235 
4236                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4237                                                    cpu_src2);
4238                                     r_tickptr = tcg_temp_new_ptr();
4239                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4240                                                    offsetof(CPUSPARCState, stick));
4241                                     translator_io_start(&dc->base);
4242                                     gen_helper_tick_set_limit(r_tickptr,
4243                                                               cpu_stick_cmpr);
4244                                     /* End TB to handle timer interrupt */
4245                                     dc->base.is_jmp = DISAS_EXIT;
4246                                 }
4247                                 break;
4248 
4249                             case 0x10: /* Performance Control */
4250                             case 0x11: /* Performance Instrumentation
4251                                           Counter */
4252                             case 0x12: /* Dispatch Control */
4253 #endif
4254                             default:
4255                                 goto illegal_insn;
4256                             }
4257                         }
4258                         break;
4259 #if !defined(CONFIG_USER_ONLY)
4260                     case 0x31: /* wrpsr, V9 saved, restored */
4261                         {
4262                             if (!supervisor(dc))
4263                                 goto priv_insn;
4264 #ifdef TARGET_SPARC64
4265                             switch (rd) {
4266                             case 0:
4267                                 gen_helper_saved(cpu_env);
4268                                 break;
4269                             case 1:
4270                                 gen_helper_restored(cpu_env);
4271                                 break;
4272                             case 2: /* UA2005 allclean */
4273                             case 3: /* UA2005 otherw */
4274                             case 4: /* UA2005 normalw */
4275                             case 5: /* UA2005 invalw */
4276                                 // XXX
4277                             default:
4278                                 goto illegal_insn;
4279                             }
4280 #else
4281                             cpu_tmp0 = tcg_temp_new();
4282                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4283                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
4284                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4285                             dc->cc_op = CC_OP_FLAGS;
4286                             save_state(dc);
4287                             gen_op_next_insn();
4288                             tcg_gen_exit_tb(NULL, 0);
4289                             dc->base.is_jmp = DISAS_NORETURN;
4290 #endif
4291                         }
4292                         break;
4293                     case 0x32: /* wrwim, V9 wrpr */
4294                         {
4295                             if (!supervisor(dc))
4296                                 goto priv_insn;
4297                             cpu_tmp0 = tcg_temp_new();
4298                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4299 #ifdef TARGET_SPARC64
4300                             switch (rd) {
4301                             case 0: // tpc
4302                                 {
4303                                     TCGv_ptr r_tsptr;
4304 
4305                                     r_tsptr = tcg_temp_new_ptr();
4306                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4307                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4308                                                   offsetof(trap_state, tpc));
4309                                 }
4310                                 break;
4311                             case 1: // tnpc
4312                                 {
4313                                     TCGv_ptr r_tsptr;
4314 
4315                                     r_tsptr = tcg_temp_new_ptr();
4316                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4317                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4318                                                   offsetof(trap_state, tnpc));
4319                                 }
4320                                 break;
4321                             case 2: // tstate
4322                                 {
4323                                     TCGv_ptr r_tsptr;
4324 
4325                                     r_tsptr = tcg_temp_new_ptr();
4326                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4327                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4328                                                   offsetof(trap_state,
4329                                                            tstate));
4330                                 }
4331                                 break;
4332                             case 3: // tt
4333                                 {
4334                                     TCGv_ptr r_tsptr;
4335 
4336                                     r_tsptr = tcg_temp_new_ptr();
4337                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4338                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4339                                                     offsetof(trap_state, tt));
4340                                 }
4341                                 break;
4342                             case 4: // tick
4343                                 {
4344                                     TCGv_ptr r_tickptr;
4345 
4346                                     r_tickptr = tcg_temp_new_ptr();
4347                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4348                                                    offsetof(CPUSPARCState, tick));
4349                                     translator_io_start(&dc->base);
4350                                     gen_helper_tick_set_count(r_tickptr,
4351                                                               cpu_tmp0);
4352                                     /* End TB to handle timer interrupt */
4353                                     dc->base.is_jmp = DISAS_EXIT;
4354                                 }
4355                                 break;
4356                             case 5: // tba
4357                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4358                                 break;
4359                             case 6: // pstate
4360                                 save_state(dc);
4361                                 if (translator_io_start(&dc->base)) {
4362                                     dc->base.is_jmp = DISAS_EXIT;
4363                                 }
4364                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4365                                 dc->npc = DYNAMIC_PC;
4366                                 break;
4367                             case 7: // tl
4368                                 save_state(dc);
4369                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4370                                                offsetof(CPUSPARCState, tl));
4371                                 dc->npc = DYNAMIC_PC;
4372                                 break;
4373                             case 8: // pil
4374                                 if (translator_io_start(&dc->base)) {
4375                                     dc->base.is_jmp = DISAS_EXIT;
4376                                 }
4377                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4378                                 break;
4379                             case 9: // cwp
4380                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4381                                 break;
4382                             case 10: // cansave
4383                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4384                                                 offsetof(CPUSPARCState,
4385                                                          cansave));
4386                                 break;
4387                             case 11: // canrestore
4388                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4389                                                 offsetof(CPUSPARCState,
4390                                                          canrestore));
4391                                 break;
4392                             case 12: // cleanwin
4393                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4394                                                 offsetof(CPUSPARCState,
4395                                                          cleanwin));
4396                                 break;
4397                             case 13: // otherwin
4398                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4399                                                 offsetof(CPUSPARCState,
4400                                                          otherwin));
4401                                 break;
4402                             case 14: // wstate
4403                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4404                                                 offsetof(CPUSPARCState,
4405                                                          wstate));
4406                                 break;
4407                             case 16: // UA2005 gl
4408                                 CHECK_IU_FEATURE(dc, GL);
4409                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4410                                 break;
4411                             case 26: // UA2005 strand status
4412                                 CHECK_IU_FEATURE(dc, HYPV);
4413                                 if (!hypervisor(dc))
4414                                     goto priv_insn;
4415                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4416                                 break;
4417                             default:
4418                                 goto illegal_insn;
4419                             }
4420 #else
4421                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4422                             if (dc->def->nwindows != 32) {
4423                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4424                                                 (1 << dc->def->nwindows) - 1);
4425                             }
4426 #endif
4427                         }
4428                         break;
4429                     case 0x33: /* wrtbr, UA2005 wrhpr */
4430                         {
4431 #ifndef TARGET_SPARC64
4432                             if (!supervisor(dc))
4433                                 goto priv_insn;
4434                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4435 #else
4436                             CHECK_IU_FEATURE(dc, HYPV);
4437                             if (!hypervisor(dc))
4438                                 goto priv_insn;
4439                             cpu_tmp0 = tcg_temp_new();
4440                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4441                             switch (rd) {
4442                             case 0: // hpstate
4443                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4444                                                offsetof(CPUSPARCState,
4445                                                         hpstate));
4446                                 save_state(dc);
4447                                 gen_op_next_insn();
4448                                 tcg_gen_exit_tb(NULL, 0);
4449                                 dc->base.is_jmp = DISAS_NORETURN;
4450                                 break;
4451                             case 1: // htstate
4452                                 // XXX gen_op_wrhtstate();
4453                                 break;
4454                             case 3: // hintp
4455                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4456                                 break;
4457                             case 5: // htba
4458                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4459                                 break;
4460                             case 31: // hstick_cmpr
4461                                 {
4462                                     TCGv_ptr r_tickptr;
4463 
4464                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4465                                     r_tickptr = tcg_temp_new_ptr();
4466                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4467                                                    offsetof(CPUSPARCState, hstick));
4468                                     translator_io_start(&dc->base);
4469                                     gen_helper_tick_set_limit(r_tickptr,
4470                                                               cpu_hstick_cmpr);
4471                                     /* End TB to handle timer interrupt */
4472                                     dc->base.is_jmp = DISAS_EXIT;
4473                                 }
4474                                 break;
4475                             case 6: // hver readonly
4476                             default:
4477                                 goto illegal_insn;
4478                             }
4479 #endif
4480                         }
4481                         break;
4482 #endif
4483 #ifdef TARGET_SPARC64
4484                     case 0x2c: /* V9 movcc */
4485                         {
4486                             int cc = GET_FIELD_SP(insn, 11, 12);
4487                             int cond = GET_FIELD_SP(insn, 14, 17);
4488                             DisasCompare cmp;
4489                             TCGv dst;
4490 
4491                             if (insn & (1 << 18)) {
4492                                 if (cc == 0) {
4493                                     gen_compare(&cmp, 0, cond, dc);
4494                                 } else if (cc == 2) {
4495                                     gen_compare(&cmp, 1, cond, dc);
4496                                 } else {
4497                                     goto illegal_insn;
4498                                 }
4499                             } else {
4500                                 gen_fcompare(&cmp, cc, cond);
4501                             }
4502 
4503                             /* The get_src2 above loaded the normal 13-bit
4504                                immediate field, not the 11-bit field we have
4505                                in movcc.  But it did handle the reg case.  */
4506                             if (IS_IMM) {
4507                                 simm = GET_FIELD_SPs(insn, 0, 10);
4508                                 tcg_gen_movi_tl(cpu_src2, simm);
4509                             }
4510 
4511                             dst = gen_load_gpr(dc, rd);
4512                             tcg_gen_movcond_tl(cmp.cond, dst,
4513                                                cmp.c1, cmp.c2,
4514                                                cpu_src2, dst);
4515                             gen_store_gpr(dc, rd, dst);
4516                             break;
4517                         }
4518                     case 0x2d: /* V9 sdivx */
4519                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4520                         gen_store_gpr(dc, rd, cpu_dst);
4521                         break;
4522                     case 0x2e: /* V9 popc */
4523                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4524                         gen_store_gpr(dc, rd, cpu_dst);
4525                         break;
4526                     case 0x2f: /* V9 movr */
4527                         {
4528                             int cond = GET_FIELD_SP(insn, 10, 12);
4529                             DisasCompare cmp;
4530                             TCGv dst;
4531 
4532                             gen_compare_reg(&cmp, cond, cpu_src1);
4533 
4534                             /* The get_src2 above loaded the normal 13-bit
4535                                immediate field, not the 10-bit field we have
4536                                in movr.  But it did handle the reg case.  */
4537                             if (IS_IMM) {
4538                                 simm = GET_FIELD_SPs(insn, 0, 9);
4539                                 tcg_gen_movi_tl(cpu_src2, simm);
4540                             }
4541 
4542                             dst = gen_load_gpr(dc, rd);
4543                             tcg_gen_movcond_tl(cmp.cond, dst,
4544                                                cmp.c1, cmp.c2,
4545                                                cpu_src2, dst);
4546                             gen_store_gpr(dc, rd, dst);
4547                             break;
4548                         }
4549 #endif
4550                     default:
4551                         goto illegal_insn;
4552                     }
4553                 }
4554             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4555 #ifdef TARGET_SPARC64
4556                 int opf = GET_FIELD_SP(insn, 5, 13);
4557                 rs1 = GET_FIELD(insn, 13, 17);
4558                 rs2 = GET_FIELD(insn, 27, 31);
4559                 if (gen_trap_ifnofpu(dc)) {
4560                     goto jmp_insn;
4561                 }
4562 
4563                 switch (opf) {
4564                 case 0x000: /* VIS I edge8cc */
4565                     CHECK_FPU_FEATURE(dc, VIS1);
4566                     cpu_src1 = gen_load_gpr(dc, rs1);
4567                     cpu_src2 = gen_load_gpr(dc, rs2);
4568                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4569                     gen_store_gpr(dc, rd, cpu_dst);
4570                     break;
4571                 case 0x001: /* VIS II edge8n */
4572                     CHECK_FPU_FEATURE(dc, VIS2);
4573                     cpu_src1 = gen_load_gpr(dc, rs1);
4574                     cpu_src2 = gen_load_gpr(dc, rs2);
4575                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4576                     gen_store_gpr(dc, rd, cpu_dst);
4577                     break;
4578                 case 0x002: /* VIS I edge8lcc */
4579                     CHECK_FPU_FEATURE(dc, VIS1);
4580                     cpu_src1 = gen_load_gpr(dc, rs1);
4581                     cpu_src2 = gen_load_gpr(dc, rs2);
4582                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4583                     gen_store_gpr(dc, rd, cpu_dst);
4584                     break;
4585                 case 0x003: /* VIS II edge8ln */
4586                     CHECK_FPU_FEATURE(dc, VIS2);
4587                     cpu_src1 = gen_load_gpr(dc, rs1);
4588                     cpu_src2 = gen_load_gpr(dc, rs2);
4589                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4590                     gen_store_gpr(dc, rd, cpu_dst);
4591                     break;
4592                 case 0x004: /* VIS I edge16cc */
4593                     CHECK_FPU_FEATURE(dc, VIS1);
4594                     cpu_src1 = gen_load_gpr(dc, rs1);
4595                     cpu_src2 = gen_load_gpr(dc, rs2);
4596                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4597                     gen_store_gpr(dc, rd, cpu_dst);
4598                     break;
4599                 case 0x005: /* VIS II edge16n */
4600                     CHECK_FPU_FEATURE(dc, VIS2);
4601                     cpu_src1 = gen_load_gpr(dc, rs1);
4602                     cpu_src2 = gen_load_gpr(dc, rs2);
4603                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4604                     gen_store_gpr(dc, rd, cpu_dst);
4605                     break;
4606                 case 0x006: /* VIS I edge16lcc */
4607                     CHECK_FPU_FEATURE(dc, VIS1);
4608                     cpu_src1 = gen_load_gpr(dc, rs1);
4609                     cpu_src2 = gen_load_gpr(dc, rs2);
4610                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4611                     gen_store_gpr(dc, rd, cpu_dst);
4612                     break;
4613                 case 0x007: /* VIS II edge16ln */
4614                     CHECK_FPU_FEATURE(dc, VIS2);
4615                     cpu_src1 = gen_load_gpr(dc, rs1);
4616                     cpu_src2 = gen_load_gpr(dc, rs2);
4617                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4618                     gen_store_gpr(dc, rd, cpu_dst);
4619                     break;
4620                 case 0x008: /* VIS I edge32cc */
4621                     CHECK_FPU_FEATURE(dc, VIS1);
4622                     cpu_src1 = gen_load_gpr(dc, rs1);
4623                     cpu_src2 = gen_load_gpr(dc, rs2);
4624                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4625                     gen_store_gpr(dc, rd, cpu_dst);
4626                     break;
4627                 case 0x009: /* VIS II edge32n */
4628                     CHECK_FPU_FEATURE(dc, VIS2);
4629                     cpu_src1 = gen_load_gpr(dc, rs1);
4630                     cpu_src2 = gen_load_gpr(dc, rs2);
4631                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4632                     gen_store_gpr(dc, rd, cpu_dst);
4633                     break;
4634                 case 0x00a: /* VIS I edge32lcc */
4635                     CHECK_FPU_FEATURE(dc, VIS1);
4636                     cpu_src1 = gen_load_gpr(dc, rs1);
4637                     cpu_src2 = gen_load_gpr(dc, rs2);
4638                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4639                     gen_store_gpr(dc, rd, cpu_dst);
4640                     break;
4641                 case 0x00b: /* VIS II edge32ln */
4642                     CHECK_FPU_FEATURE(dc, VIS2);
4643                     cpu_src1 = gen_load_gpr(dc, rs1);
4644                     cpu_src2 = gen_load_gpr(dc, rs2);
4645                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4646                     gen_store_gpr(dc, rd, cpu_dst);
4647                     break;
4648                 case 0x010: /* VIS I array8 */
4649                     CHECK_FPU_FEATURE(dc, VIS1);
4650                     cpu_src1 = gen_load_gpr(dc, rs1);
4651                     cpu_src2 = gen_load_gpr(dc, rs2);
4652                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4653                     gen_store_gpr(dc, rd, cpu_dst);
4654                     break;
4655                 case 0x012: /* VIS I array16 */
4656                     CHECK_FPU_FEATURE(dc, VIS1);
4657                     cpu_src1 = gen_load_gpr(dc, rs1);
4658                     cpu_src2 = gen_load_gpr(dc, rs2);
4659                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4660                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4661                     gen_store_gpr(dc, rd, cpu_dst);
4662                     break;
4663                 case 0x014: /* VIS I array32 */
4664                     CHECK_FPU_FEATURE(dc, VIS1);
4665                     cpu_src1 = gen_load_gpr(dc, rs1);
4666                     cpu_src2 = gen_load_gpr(dc, rs2);
4667                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4668                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4669                     gen_store_gpr(dc, rd, cpu_dst);
4670                     break;
4671                 case 0x018: /* VIS I alignaddr */
4672                     CHECK_FPU_FEATURE(dc, VIS1);
4673                     cpu_src1 = gen_load_gpr(dc, rs1);
4674                     cpu_src2 = gen_load_gpr(dc, rs2);
4675                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4676                     gen_store_gpr(dc, rd, cpu_dst);
4677                     break;
4678                 case 0x01a: /* VIS I alignaddrl */
4679                     CHECK_FPU_FEATURE(dc, VIS1);
4680                     cpu_src1 = gen_load_gpr(dc, rs1);
4681                     cpu_src2 = gen_load_gpr(dc, rs2);
4682                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4683                     gen_store_gpr(dc, rd, cpu_dst);
4684                     break;
4685                 case 0x019: /* VIS II bmask */
4686                     CHECK_FPU_FEATURE(dc, VIS2);
4687                     cpu_src1 = gen_load_gpr(dc, rs1);
4688                     cpu_src2 = gen_load_gpr(dc, rs2);
4689                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4690                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4691                     gen_store_gpr(dc, rd, cpu_dst);
4692                     break;
4693                 case 0x020: /* VIS I fcmple16 */
4694                     CHECK_FPU_FEATURE(dc, VIS1);
4695                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4696                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4697                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4698                     gen_store_gpr(dc, rd, cpu_dst);
4699                     break;
4700                 case 0x022: /* VIS I fcmpne16 */
4701                     CHECK_FPU_FEATURE(dc, VIS1);
4702                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4703                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4704                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4705                     gen_store_gpr(dc, rd, cpu_dst);
4706                     break;
4707                 case 0x024: /* VIS I fcmple32 */
4708                     CHECK_FPU_FEATURE(dc, VIS1);
4709                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4710                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4711                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4712                     gen_store_gpr(dc, rd, cpu_dst);
4713                     break;
4714                 case 0x026: /* VIS I fcmpne32 */
4715                     CHECK_FPU_FEATURE(dc, VIS1);
4716                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4717                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4718                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4719                     gen_store_gpr(dc, rd, cpu_dst);
4720                     break;
4721                 case 0x028: /* VIS I fcmpgt16 */
4722                     CHECK_FPU_FEATURE(dc, VIS1);
4723                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4724                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4725                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4726                     gen_store_gpr(dc, rd, cpu_dst);
4727                     break;
4728                 case 0x02a: /* VIS I fcmpeq16 */
4729                     CHECK_FPU_FEATURE(dc, VIS1);
4730                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4731                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4732                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4733                     gen_store_gpr(dc, rd, cpu_dst);
4734                     break;
4735                 case 0x02c: /* VIS I fcmpgt32 */
4736                     CHECK_FPU_FEATURE(dc, VIS1);
4737                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4738                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4739                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4740                     gen_store_gpr(dc, rd, cpu_dst);
4741                     break;
4742                 case 0x02e: /* VIS I fcmpeq32 */
4743                     CHECK_FPU_FEATURE(dc, VIS1);
4744                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4745                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4746                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4747                     gen_store_gpr(dc, rd, cpu_dst);
4748                     break;
4749                 case 0x031: /* VIS I fmul8x16 */
4750                     CHECK_FPU_FEATURE(dc, VIS1);
4751                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4752                     break;
4753                 case 0x033: /* VIS I fmul8x16au */
4754                     CHECK_FPU_FEATURE(dc, VIS1);
4755                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4756                     break;
4757                 case 0x035: /* VIS I fmul8x16al */
4758                     CHECK_FPU_FEATURE(dc, VIS1);
4759                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4760                     break;
4761                 case 0x036: /* VIS I fmul8sux16 */
4762                     CHECK_FPU_FEATURE(dc, VIS1);
4763                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4764                     break;
4765                 case 0x037: /* VIS I fmul8ulx16 */
4766                     CHECK_FPU_FEATURE(dc, VIS1);
4767                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4768                     break;
4769                 case 0x038: /* VIS I fmuld8sux16 */
4770                     CHECK_FPU_FEATURE(dc, VIS1);
4771                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4772                     break;
4773                 case 0x039: /* VIS I fmuld8ulx16 */
4774                     CHECK_FPU_FEATURE(dc, VIS1);
4775                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4776                     break;
4777                 case 0x03a: /* VIS I fpack32 */
4778                     CHECK_FPU_FEATURE(dc, VIS1);
4779                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4780                     break;
4781                 case 0x03b: /* VIS I fpack16 */
4782                     CHECK_FPU_FEATURE(dc, VIS1);
4783                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4784                     cpu_dst_32 = gen_dest_fpr_F(dc);
4785                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4786                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4787                     break;
4788                 case 0x03d: /* VIS I fpackfix */
4789                     CHECK_FPU_FEATURE(dc, VIS1);
4790                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4791                     cpu_dst_32 = gen_dest_fpr_F(dc);
4792                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4793                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4794                     break;
4795                 case 0x03e: /* VIS I pdist */
4796                     CHECK_FPU_FEATURE(dc, VIS1);
4797                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4798                     break;
4799                 case 0x048: /* VIS I faligndata */
4800                     CHECK_FPU_FEATURE(dc, VIS1);
4801                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4802                     break;
4803                 case 0x04b: /* VIS I fpmerge */
4804                     CHECK_FPU_FEATURE(dc, VIS1);
4805                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4806                     break;
4807                 case 0x04c: /* VIS II bshuffle */
4808                     CHECK_FPU_FEATURE(dc, VIS2);
4809                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4810                     break;
4811                 case 0x04d: /* VIS I fexpand */
4812                     CHECK_FPU_FEATURE(dc, VIS1);
4813                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4814                     break;
4815                 case 0x050: /* VIS I fpadd16 */
4816                     CHECK_FPU_FEATURE(dc, VIS1);
4817                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4818                     break;
4819                 case 0x051: /* VIS I fpadd16s */
4820                     CHECK_FPU_FEATURE(dc, VIS1);
4821                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4822                     break;
4823                 case 0x052: /* VIS I fpadd32 */
4824                     CHECK_FPU_FEATURE(dc, VIS1);
4825                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
4826                     break;
4827                 case 0x053: /* VIS I fpadd32s */
4828                     CHECK_FPU_FEATURE(dc, VIS1);
4829                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
4830                     break;
4831                 case 0x054: /* VIS I fpsub16 */
4832                     CHECK_FPU_FEATURE(dc, VIS1);
4833                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
4834                     break;
4835                 case 0x055: /* VIS I fpsub16s */
4836                     CHECK_FPU_FEATURE(dc, VIS1);
4837                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
4838                     break;
4839                 case 0x056: /* VIS I fpsub32 */
4840                     CHECK_FPU_FEATURE(dc, VIS1);
4841                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
4842                     break;
4843                 case 0x057: /* VIS I fpsub32s */
4844                     CHECK_FPU_FEATURE(dc, VIS1);
4845                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
4846                     break;
4847                 case 0x060: /* VIS I fzero */
4848                     CHECK_FPU_FEATURE(dc, VIS1);
4849                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4850                     tcg_gen_movi_i64(cpu_dst_64, 0);
4851                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4852                     break;
4853                 case 0x061: /* VIS I fzeros */
4854                     CHECK_FPU_FEATURE(dc, VIS1);
4855                     cpu_dst_32 = gen_dest_fpr_F(dc);
4856                     tcg_gen_movi_i32(cpu_dst_32, 0);
4857                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4858                     break;
4859                 case 0x062: /* VIS I fnor */
4860                     CHECK_FPU_FEATURE(dc, VIS1);
4861                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
4862                     break;
4863                 case 0x063: /* VIS I fnors */
4864                     CHECK_FPU_FEATURE(dc, VIS1);
4865                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
4866                     break;
4867                 case 0x064: /* VIS I fandnot2 */
4868                     CHECK_FPU_FEATURE(dc, VIS1);
4869                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4870                     break;
4871                 case 0x065: /* VIS I fandnot2s */
4872                     CHECK_FPU_FEATURE(dc, VIS1);
4873                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4874                     break;
4875                 case 0x066: /* VIS I fnot2 */
4876                     CHECK_FPU_FEATURE(dc, VIS1);
4877                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4878                     break;
4879                 case 0x067: /* VIS I fnot2s */
4880                     CHECK_FPU_FEATURE(dc, VIS1);
4881                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4882                     break;
4883                 case 0x068: /* VIS I fandnot1 */
4884                     CHECK_FPU_FEATURE(dc, VIS1);
4885                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4886                     break;
4887                 case 0x069: /* VIS I fandnot1s */
4888                     CHECK_FPU_FEATURE(dc, VIS1);
4889                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4890                     break;
4891                 case 0x06a: /* VIS I fnot1 */
4892                     CHECK_FPU_FEATURE(dc, VIS1);
4893                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4894                     break;
4895                 case 0x06b: /* VIS I fnot1s */
4896                     CHECK_FPU_FEATURE(dc, VIS1);
4897                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4898                     break;
4899                 case 0x06c: /* VIS I fxor */
4900                     CHECK_FPU_FEATURE(dc, VIS1);
4901                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4902                     break;
4903                 case 0x06d: /* VIS I fxors */
4904                     CHECK_FPU_FEATURE(dc, VIS1);
4905                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4906                     break;
4907                 case 0x06e: /* VIS I fnand */
4908                     CHECK_FPU_FEATURE(dc, VIS1);
4909                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4910                     break;
4911                 case 0x06f: /* VIS I fnands */
4912                     CHECK_FPU_FEATURE(dc, VIS1);
4913                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4914                     break;
4915                 case 0x070: /* VIS I fand */
4916                     CHECK_FPU_FEATURE(dc, VIS1);
4917                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4918                     break;
4919                 case 0x071: /* VIS I fands */
4920                     CHECK_FPU_FEATURE(dc, VIS1);
4921                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4922                     break;
4923                 case 0x072: /* VIS I fxnor */
4924                     CHECK_FPU_FEATURE(dc, VIS1);
4925                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4926                     break;
4927                 case 0x073: /* VIS I fxnors */
4928                     CHECK_FPU_FEATURE(dc, VIS1);
4929                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4930                     break;
4931                 case 0x074: /* VIS I fsrc1 */
4932                     CHECK_FPU_FEATURE(dc, VIS1);
4933                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4934                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4935                     break;
4936                 case 0x075: /* VIS I fsrc1s */
4937                     CHECK_FPU_FEATURE(dc, VIS1);
4938                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4939                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4940                     break;
4941                 case 0x076: /* VIS I fornot2 */
4942                     CHECK_FPU_FEATURE(dc, VIS1);
4943                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4944                     break;
4945                 case 0x077: /* VIS I fornot2s */
4946                     CHECK_FPU_FEATURE(dc, VIS1);
4947                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
4948                     break;
4949                 case 0x078: /* VIS I fsrc2 */
4950                     CHECK_FPU_FEATURE(dc, VIS1);
4951                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4952                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4953                     break;
4954                 case 0x079: /* VIS I fsrc2s */
4955                     CHECK_FPU_FEATURE(dc, VIS1);
4956                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
4957                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4958                     break;
4959                 case 0x07a: /* VIS I fornot1 */
4960                     CHECK_FPU_FEATURE(dc, VIS1);
4961                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
4962                     break;
4963                 case 0x07b: /* VIS I fornot1s */
4964                     CHECK_FPU_FEATURE(dc, VIS1);
4965                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4966                     break;
4967                 case 0x07c: /* VIS I for */
4968                     CHECK_FPU_FEATURE(dc, VIS1);
4969                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4970                     break;
4971                 case 0x07d: /* VIS I fors */
4972                     CHECK_FPU_FEATURE(dc, VIS1);
4973                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4974                     break;
4975                 case 0x07e: /* VIS I fone */
4976                     CHECK_FPU_FEATURE(dc, VIS1);
4977                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4978                     tcg_gen_movi_i64(cpu_dst_64, -1);
4979                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4980                     break;
4981                 case 0x07f: /* VIS I fones */
4982                     CHECK_FPU_FEATURE(dc, VIS1);
4983                     cpu_dst_32 = gen_dest_fpr_F(dc);
4984                     tcg_gen_movi_i32(cpu_dst_32, -1);
4985                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4986                     break;
4987                 case 0x080: /* VIS I shutdown */
4988                 case 0x081: /* VIS II siam */
4989                     // XXX
4990                     goto illegal_insn;
4991                 default:
4992                     goto illegal_insn;
4993                 }
4994 #else
4995                 goto ncp_insn;
4996 #endif
4997             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4998 #ifdef TARGET_SPARC64
4999                 goto illegal_insn;
5000 #else
5001                 goto ncp_insn;
5002 #endif
5003 #ifdef TARGET_SPARC64
5004             } else if (xop == 0x39) { /* V9 return */
5005                 save_state(dc);
5006                 cpu_src1 = get_src1(dc, insn);
5007                 cpu_tmp0 = tcg_temp_new();
5008                 if (IS_IMM) {   /* immediate */
5009                     simm = GET_FIELDs(insn, 19, 31);
5010                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5011                 } else {                /* register */
5012                     rs2 = GET_FIELD(insn, 27, 31);
5013                     if (rs2) {
5014                         cpu_src2 = gen_load_gpr(dc, rs2);
5015                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5016                     } else {
5017                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5018                     }
5019                 }
5020                 gen_helper_restore(cpu_env);
5021                 gen_mov_pc_npc(dc);
5022                 gen_check_align(cpu_tmp0, 3);
5023                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5024                 dc->npc = DYNAMIC_PC;
5025                 goto jmp_insn;
5026 #endif
5027             } else {
5028                 cpu_src1 = get_src1(dc, insn);
5029                 cpu_tmp0 = tcg_temp_new();
5030                 if (IS_IMM) {   /* immediate */
5031                     simm = GET_FIELDs(insn, 19, 31);
5032                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5033                 } else {                /* register */
5034                     rs2 = GET_FIELD(insn, 27, 31);
5035                     if (rs2) {
5036                         cpu_src2 = gen_load_gpr(dc, rs2);
5037                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5038                     } else {
5039                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5040                     }
5041                 }
5042                 switch (xop) {
                case 0x38:      /* jmpl */
                    {
                        /* rd receives the address of the jmpl itself
                           (the current dc->pc), per the SPARC link
                           semantics; gen_dest_gpr/gen_store_gpr make
                           this a no-op when rd is %g0.  */
                        TCGv t = gen_dest_gpr(dc, rd);
                        tcg_gen_movi_tl(t, dc->pc);
                        gen_store_gpr(dc, rd, t);

                        /* pc <- npc, then npc <- computed target
                           (cpu_tmp0 = rs1 + simm/rs2, built above).
                           Misaligned targets trap via gen_check_align;
                           the target is masked to 32 bits when PSTATE.AM
                           applies (gen_address_mask).  */
                        gen_mov_pc_npc(dc);
                        gen_check_align(cpu_tmp0, 3);
                        gen_address_mask(dc, cpu_tmp0);
                        tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
                        /* npc is now only known at runtime.  */
                        dc->npc = DYNAMIC_PC;
                    }
                    goto jmp_insn;
5056 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5057                 case 0x39:      /* rett, V9 return */
5058                     {
5059                         if (!supervisor(dc))
5060                             goto priv_insn;
5061                         gen_mov_pc_npc(dc);
5062                         gen_check_align(cpu_tmp0, 3);
5063                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5064                         dc->npc = DYNAMIC_PC;
5065                         gen_helper_rett(cpu_env);
5066                     }
5067                     goto jmp_insn;
5068 #endif
5069                 case 0x3b: /* flush */
5070                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5071                         goto unimp_flush;
5072                     /* nop */
5073                     break;
5074                 case 0x3c:      /* save */
5075                     gen_helper_save(cpu_env);
5076                     gen_store_gpr(dc, rd, cpu_tmp0);
5077                     break;
5078                 case 0x3d:      /* restore */
5079                     gen_helper_restore(cpu_env);
5080                     gen_store_gpr(dc, rd, cpu_tmp0);
5081                     break;
5082 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5083                 case 0x3e:      /* V9 done/retry */
5084                     {
5085                         switch (rd) {
5086                         case 0:
5087                             if (!supervisor(dc))
5088                                 goto priv_insn;
5089                             dc->npc = DYNAMIC_PC;
5090                             dc->pc = DYNAMIC_PC;
5091                             translator_io_start(&dc->base);
5092                             gen_helper_done(cpu_env);
5093                             goto jmp_insn;
5094                         case 1:
5095                             if (!supervisor(dc))
5096                                 goto priv_insn;
5097                             dc->npc = DYNAMIC_PC;
5098                             dc->pc = DYNAMIC_PC;
5099                             translator_io_start(&dc->base);
5100                             gen_helper_retry(cpu_env);
5101                             goto jmp_insn;
5102                         default:
5103                             goto illegal_insn;
5104                         }
5105                     }
5106                     break;
5107 #endif
5108                 default:
5109                     goto illegal_insn;
5110                 }
5111             }
5112             break;
5113         }
5114         break;
5115     case 3:                     /* load/store instructions */
5116         {
5117             unsigned int xop = GET_FIELD(insn, 7, 12);
5118             /* ??? gen_address_mask prevents us from using a source
5119                register directly.  Always generate a temporary.  */
5120             TCGv cpu_addr = tcg_temp_new();
5121 
5122             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5123             if (xop == 0x3c || xop == 0x3e) {
5124                 /* V9 casa/casxa : no offset */
5125             } else if (IS_IMM) {     /* immediate */
5126                 simm = GET_FIELDs(insn, 19, 31);
5127                 if (simm != 0) {
5128                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5129                 }
5130             } else {            /* register */
5131                 rs2 = GET_FIELD(insn, 27, 31);
5132                 if (rs2 != 0) {
5133                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5134                 }
5135             }
5136             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5137                 (xop > 0x17 && xop <= 0x1d ) ||
5138                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5139                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5140 
5141                 switch (xop) {
5142                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5143                     gen_address_mask(dc, cpu_addr);
5144                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5145                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5146                     break;
5147                 case 0x1:       /* ldub, load unsigned byte */
5148                     gen_address_mask(dc, cpu_addr);
5149                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5150                                        dc->mem_idx, MO_UB);
5151                     break;
5152                 case 0x2:       /* lduh, load unsigned halfword */
5153                     gen_address_mask(dc, cpu_addr);
5154                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5155                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5156                     break;
                case 0x3:       /* ldd, load double word */
                    /* ldd requires an even destination register: the
                       value is delivered into the pair rd/rd+1.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;

                        gen_address_mask(dc, cpu_addr);
                        /* One atomic(-ish) aligned 64-bit load...  */
                        t64 = tcg_temp_new_i64();
                        tcg_gen_qemu_ld_i64(t64, cpu_addr,
                                            dc->mem_idx, MO_TEUQ | MO_ALIGN);
                        /* ...then split: low 32 bits go to rd+1 here,
                           high 32 bits are left in cpu_val, which the
                           common gen_store_gpr after this switch writes
                           into rd.  */
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                        gen_store_gpr(dc, rd + 1, cpu_val);
                        tcg_gen_shri_i64(t64, t64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                    }
                    break;
5175                 case 0x9:       /* ldsb, load signed byte */
5176                     gen_address_mask(dc, cpu_addr);
5177                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5178                     break;
5179                 case 0xa:       /* ldsh, load signed halfword */
5180                     gen_address_mask(dc, cpu_addr);
5181                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5182                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5183                     break;
5184                 case 0xd:       /* ldstub */
5185                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5186                     break;
5187                 case 0x0f:
5188                     /* swap, swap register with memory. Also atomically */
5189                     CHECK_IU_FEATURE(dc, SWAP);
5190                     cpu_src1 = gen_load_gpr(dc, rd);
5191                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5192                              dc->mem_idx, MO_TEUL);
5193                     break;
5194 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5195                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5196                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5197                     break;
5198                 case 0x11:      /* lduba, load unsigned byte alternate */
5199                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5200                     break;
5201                 case 0x12:      /* lduha, load unsigned halfword alternate */
5202                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5203                     break;
5204                 case 0x13:      /* ldda, load double word alternate */
5205                     if (rd & 1) {
5206                         goto illegal_insn;
5207                     }
5208                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5209                     goto skip_move;
5210                 case 0x19:      /* ldsba, load signed byte alternate */
5211                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5212                     break;
5213                 case 0x1a:      /* ldsha, load signed halfword alternate */
5214                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5215                     break;
5216                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5217                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5218                     break;
5219                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5220                                    atomically */
5221                     CHECK_IU_FEATURE(dc, SWAP);
5222                     cpu_src1 = gen_load_gpr(dc, rd);
5223                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5224                     break;
5225 
5226 #ifndef TARGET_SPARC64
5227                 case 0x30: /* ldc */
5228                 case 0x31: /* ldcsr */
5229                 case 0x33: /* lddc */
5230                     goto ncp_insn;
5231 #endif
5232 #endif
5233 #ifdef TARGET_SPARC64
5234                 case 0x08: /* V9 ldsw */
5235                     gen_address_mask(dc, cpu_addr);
5236                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5237                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5238                     break;
5239                 case 0x0b: /* V9 ldx */
5240                     gen_address_mask(dc, cpu_addr);
5241                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5242                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5243                     break;
5244                 case 0x18: /* V9 ldswa */
5245                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5246                     break;
5247                 case 0x1b: /* V9 ldxa */
5248                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5249                     break;
5250                 case 0x2d: /* V9 prefetch, no effect */
5251                     goto skip_move;
5252                 case 0x30: /* V9 ldfa */
5253                     if (gen_trap_ifnofpu(dc)) {
5254                         goto jmp_insn;
5255                     }
5256                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5257                     gen_update_fprs_dirty(dc, rd);
5258                     goto skip_move;
5259                 case 0x33: /* V9 lddfa */
5260                     if (gen_trap_ifnofpu(dc)) {
5261                         goto jmp_insn;
5262                     }
5263                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5264                     gen_update_fprs_dirty(dc, DFPREG(rd));
5265                     goto skip_move;
5266                 case 0x3d: /* V9 prefetcha, no effect */
5267                     goto skip_move;
5268                 case 0x32: /* V9 ldqfa */
5269                     CHECK_FPU_FEATURE(dc, FLOAT128);
5270                     if (gen_trap_ifnofpu(dc)) {
5271                         goto jmp_insn;
5272                     }
5273                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5274                     gen_update_fprs_dirty(dc, QFPREG(rd));
5275                     goto skip_move;
5276 #endif
5277                 default:
5278                     goto illegal_insn;
5279                 }
5280                 gen_store_gpr(dc, rd, cpu_val);
5281 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5282             skip_move: ;
5283 #endif
5284             } else if (xop >= 0x20 && xop < 0x24) {
5285                 if (gen_trap_ifnofpu(dc)) {
5286                     goto jmp_insn;
5287                 }
5288                 switch (xop) {
5289                 case 0x20:      /* ldf, load fpreg */
5290                     gen_address_mask(dc, cpu_addr);
5291                     cpu_dst_32 = gen_dest_fpr_F(dc);
5292                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5293                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5294                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5295                     break;
5296                 case 0x21:      /* ldfsr, V9 ldxfsr */
5297 #ifdef TARGET_SPARC64
5298                     gen_address_mask(dc, cpu_addr);
5299                     if (rd == 1) {
5300                         TCGv_i64 t64 = tcg_temp_new_i64();
5301                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5302                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5303                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5304                         break;
5305                     }
5306 #endif
5307                     cpu_dst_32 = tcg_temp_new_i32();
5308                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5309                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5310                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5311                     break;
5312                 case 0x22:      /* ldqf, load quad fpreg */
5313                     CHECK_FPU_FEATURE(dc, FLOAT128);
5314                     gen_address_mask(dc, cpu_addr);
5315                     cpu_src1_64 = tcg_temp_new_i64();
5316                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5317                                         MO_TEUQ | MO_ALIGN_4);
5318                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5319                     cpu_src2_64 = tcg_temp_new_i64();
5320                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5321                                         MO_TEUQ | MO_ALIGN_4);
5322                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5323                     break;
5324                 case 0x23:      /* lddf, load double fpreg */
5325                     gen_address_mask(dc, cpu_addr);
5326                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5327                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5328                                         MO_TEUQ | MO_ALIGN_4);
5329                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5330                     break;
5331                 default:
5332                     goto illegal_insn;
5333                 }
5334             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5335                        xop == 0xe || xop == 0x1e) {
5336                 TCGv cpu_val = gen_load_gpr(dc, rd);
5337 
5338                 switch (xop) {
5339                 case 0x4: /* st, store word */
5340                     gen_address_mask(dc, cpu_addr);
5341                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5342                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5343                     break;
5344                 case 0x5: /* stb, store byte */
5345                     gen_address_mask(dc, cpu_addr);
5346                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5347                     break;
5348                 case 0x6: /* sth, store halfword */
5349                     gen_address_mask(dc, cpu_addr);
5350                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5351                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5352                     break;
                case 0x7: /* std, store double word */
                    /* std requires an even source register: the pair
                       rd/rd+1 supplies the 64-bit value.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;
                        TCGv lo;

                        gen_address_mask(dc, cpu_addr);
                        /* Assemble high:low as one 64-bit value --
                           cpu_val already holds rd (the high word),
                           rd+1 supplies the low word -- and emit a
                           single aligned 64-bit store.  */
                        lo = gen_load_gpr(dc, rd + 1);
                        t64 = tcg_temp_new_i64();
                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
                        tcg_gen_qemu_st_i64(t64, cpu_addr,
                                            dc->mem_idx, MO_TEUQ | MO_ALIGN);
                    }
                    break;
5368 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5369                 case 0x14: /* sta, V9 stwa, store word alternate */
5370                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5371                     break;
5372                 case 0x15: /* stba, store byte alternate */
5373                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5374                     break;
5375                 case 0x16: /* stha, store halfword alternate */
5376                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5377                     break;
5378                 case 0x17: /* stda, store double word alternate */
5379                     if (rd & 1) {
5380                         goto illegal_insn;
5381                     }
5382                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5383                     break;
5384 #endif
5385 #ifdef TARGET_SPARC64
5386                 case 0x0e: /* V9 stx */
5387                     gen_address_mask(dc, cpu_addr);
5388                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5389                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5390                     break;
5391                 case 0x1e: /* V9 stxa */
5392                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5393                     break;
5394 #endif
5395                 default:
5396                     goto illegal_insn;
5397                 }
5398             } else if (xop > 0x23 && xop < 0x28) {
5399                 if (gen_trap_ifnofpu(dc)) {
5400                     goto jmp_insn;
5401                 }
5402                 switch (xop) {
5403                 case 0x24: /* stf, store fpreg */
5404                     gen_address_mask(dc, cpu_addr);
5405                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5406                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5407                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5408                     break;
5409                 case 0x25: /* stfsr, V9 stxfsr */
5410                     {
5411 #ifdef TARGET_SPARC64
5412                         gen_address_mask(dc, cpu_addr);
5413                         if (rd == 1) {
5414                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5415                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5416                             break;
5417                         }
5418 #endif
5419                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5420                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5421                     }
5422                     break;
5423                 case 0x26:
5424 #ifdef TARGET_SPARC64
5425                     /* V9 stqf, store quad fpreg */
5426                     CHECK_FPU_FEATURE(dc, FLOAT128);
5427                     gen_address_mask(dc, cpu_addr);
5428                     /* ??? While stqf only requires 4-byte alignment, it is
5429                        legal for the cpu to signal the unaligned exception.
5430                        The OS trap handler is then required to fix it up.
5431                        For qemu, this avoids having to probe the second page
5432                        before performing the first write.  */
5433                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5434                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5435                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5436                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5437                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5438                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5439                                         dc->mem_idx, MO_TEUQ);
5440                     break;
5441 #else /* !TARGET_SPARC64 */
5442                     /* stdfq, store floating point queue */
5443 #if defined(CONFIG_USER_ONLY)
5444                     goto illegal_insn;
5445 #else
5446                     if (!supervisor(dc))
5447                         goto priv_insn;
5448                     if (gen_trap_ifnofpu(dc)) {
5449                         goto jmp_insn;
5450                     }
5451                     goto nfq_insn;
5452 #endif
5453 #endif
5454                 case 0x27: /* stdf, store double fpreg */
5455                     gen_address_mask(dc, cpu_addr);
5456                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5457                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5458                                         MO_TEUQ | MO_ALIGN_4);
5459                     break;
5460                 default:
5461                     goto illegal_insn;
5462                 }
5463             } else if (xop > 0x33 && xop < 0x3f) {
5464                 switch (xop) {
5465 #ifdef TARGET_SPARC64
5466                 case 0x34: /* V9 stfa */
5467                     if (gen_trap_ifnofpu(dc)) {
5468                         goto jmp_insn;
5469                     }
5470                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5471                     break;
5472                 case 0x36: /* V9 stqfa */
5473                     {
5474                         CHECK_FPU_FEATURE(dc, FLOAT128);
5475                         if (gen_trap_ifnofpu(dc)) {
5476                             goto jmp_insn;
5477                         }
5478                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5479                     }
5480                     break;
5481                 case 0x37: /* V9 stdfa */
5482                     if (gen_trap_ifnofpu(dc)) {
5483                         goto jmp_insn;
5484                     }
5485                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5486                     break;
5487                 case 0x3e: /* V9 casxa */
5488                     rs2 = GET_FIELD(insn, 27, 31);
5489                     cpu_src2 = gen_load_gpr(dc, rs2);
5490                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5491                     break;
5492 #else
5493                 case 0x34: /* stc */
5494                 case 0x35: /* stcsr */
5495                 case 0x36: /* stdcq */
5496                 case 0x37: /* stdc */
5497                     goto ncp_insn;
5498 #endif
5499 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5500                 case 0x3c: /* V9 or LEON3 casa */
5501 #ifndef TARGET_SPARC64
5502                     CHECK_IU_FEATURE(dc, CASA);
5503 #endif
5504                     rs2 = GET_FIELD(insn, 27, 31);
5505                     cpu_src2 = gen_load_gpr(dc, rs2);
5506                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5507                     break;
5508 #endif
5509                 default:
5510                     goto illegal_insn;
5511                 }
5512             } else {
5513                 goto illegal_insn;
5514             }
5515         }
5516         break;
5517     }
5518     /* default case for non jump instructions */
5519     if (dc->npc == DYNAMIC_PC) {
5520         dc->pc = DYNAMIC_PC;
5521         gen_op_next_insn();
5522     } else if (dc->npc == JUMP_PC) {
5523         /* we can do a static jump */
5524         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5525         dc->base.is_jmp = DISAS_NORETURN;
5526     } else {
5527         dc->pc = dc->npc;
5528         dc->npc = dc->npc + 4;
5529     }
5530  jmp_insn:
5531     return;
5532  illegal_insn:
5533     gen_exception(dc, TT_ILL_INSN);
5534     return;
5535  unimp_flush:
5536     gen_exception(dc, TT_UNIMP_FLUSH);
5537     return;
5538 #if !defined(CONFIG_USER_ONLY)
5539  priv_insn:
5540     gen_exception(dc, TT_PRIV_INSN);
5541     return;
5542 #endif
5543  nfpu_insn:
5544     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5545     return;
5546 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5547  nfq_insn:
5548     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5549     return;
5550 #endif
5551 #ifndef TARGET_SPARC64
5552  ncp_insn:
5553     gen_exception(dc, TT_NCP_INSN);
5554     return;
5555 #endif
5556 }
5557 
/*
 * Translator hook: set up per-TB disassembly state from the CPU state
 * snapshot captured in tb->flags / tb->cs_base.
 */
static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUSPARCState *env = cs->env_ptr;
    int bound;

    /* pc starts at the TB entry; npc was stashed in cs_base at TB lookup.  */
    dc->pc = dc->base.pc_first;
    dc->npc = (target_ulong)dc->base.tb->cs_base;
    /* Condition codes are not known statically at TB entry.  */
    dc->cc_op = CC_OP_DYNAMIC;
    /* Unpack the translation-relevant CPU state from tb->flags.  */
    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
    dc->def = &env->def;
    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif
    /*
     * if we reach a page boundary, we stop generation so that the
     * PC of a TT_TFAULT exception is always in the right page
     */
    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
    dc->base.max_insns = MIN(dc->base.max_insns, bound);
}
5588 
/* Translator hook: no per-TB prologue work is needed for SPARC.  */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5592 
5593 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5594 {
5595     DisasContext *dc = container_of(dcbase, DisasContext, base);
5596 
5597     if (dc->npc & JUMP_PC) {
5598         assert(dc->jump_pc[1] == dc->pc + 4);
5599         tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5600     } else {
5601         tcg_gen_insn_start(dc->pc, dc->npc);
5602     }
5603 }
5604 
5605 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5606 {
5607     DisasContext *dc = container_of(dcbase, DisasContext, base);
5608     CPUSPARCState *env = cs->env_ptr;
5609     unsigned int insn;
5610 
5611     insn = translator_ldl(env, &dc->base, dc->pc);
5612     dc->base.pc_next += 4;
5613     disas_sparc_insn(dc, insn);
5614 
5615     if (dc->base.is_jmp == DISAS_NORETURN) {
5616         return;
5617     }
5618     if (dc->pc != dc->base.pc_next) {
5619         dc->base.is_jmp = DISAS_TOO_MANY;
5620     }
5621 }
5622 
/*
 * Translator hook: emit the TB epilogue according to how translation
 * ended, either chaining directly to the successor TB or flushing
 * pc/npc to env and exiting to the main loop.
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* pc and/or npc only known at run time: store them to env
               and take the slow exit.  */
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
        /* The insn already emitted its own ending (exception/branch).  */
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }
}
5656 
5657 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5658                                CPUState *cpu, FILE *logfile)
5659 {
5660     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5661     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5662 }
5663 
/* Hooks wiring the SPARC front end into the generic translator_loop.  */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5672 
5673 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5674                            target_ulong pc, void *host_pc)
5675 {
5676     DisasContext dc = {};
5677 
5678     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5679 }
5680 
5681 void sparc_tcg_init(void)
5682 {
5683     static const char gregnames[32][4] = {
5684         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5685         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5686         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5687         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5688     };
5689     static const char fregnames[32][4] = {
5690         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5691         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5692         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5693         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5694     };
5695 
5696     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5697 #ifdef TARGET_SPARC64
5698         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5699         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5700 #else
5701         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5702 #endif
5703         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5704         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5705     };
5706 
5707     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5708 #ifdef TARGET_SPARC64
5709         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5710         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5711         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5712         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5713           "hstick_cmpr" },
5714         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5715         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5716         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5717         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5718         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5719 #endif
5720         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5721         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5722         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5723         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5724         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5725         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5726         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5727         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5728 #ifndef CONFIG_USER_ONLY
5729         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5730 #endif
5731     };
5732 
5733     unsigned int i;
5734 
5735     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5736                                          offsetof(CPUSPARCState, regwptr),
5737                                          "regwptr");
5738 
5739     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5740         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5741     }
5742 
5743     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5744         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5745     }
5746 
5747     cpu_regs[0] = NULL;
5748     for (i = 1; i < 8; ++i) {
5749         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5750                                          offsetof(CPUSPARCState, gregs[i]),
5751                                          gregnames[i]);
5752     }
5753 
5754     for (i = 8; i < 32; ++i) {
5755         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5756                                          (i - 8) * sizeof(target_ulong),
5757                                          gregnames[i]);
5758     }
5759 
5760     for (i = 0; i < TARGET_DPREGS; i++) {
5761         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5762                                             offsetof(CPUSPARCState, fpr[i]),
5763                                             fregnames[i]);
5764     }
5765 }
5766 
5767 void sparc_restore_state_to_opc(CPUState *cs,
5768                                 const TranslationBlock *tb,
5769                                 const uint64_t *data)
5770 {
5771     SPARCCPU *cpu = SPARC_CPU(cs);
5772     CPUSPARCState *env = &cpu->env;
5773     target_ulong pc = data[0];
5774     target_ulong npc = data[1];
5775 
5776     env->pc = pc;
5777     if (npc == DYNAMIC_PC) {
5778         /* dynamic NPC: already stored */
5779     } else if (npc & JUMP_PC) {
5780         /* jump PC: use 'cond' and the jump targets of the translation */
5781         if (env->cond) {
5782             env->npc = npc & ~3;
5783         } else {
5784             env->npc = pc + 4;
5785         }
5786     } else {
5787         env->npc = npc;
5788     }
5789 }
5790