xref: /openbmc/qemu/target/sparc/translate.c (revision 08d64e0d)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "trace-tcg.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 
37 #define DEBUG_DISAS
38 
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value which takes only two values
41                          according to jump_pc[T2] */
42 
43 /* global register indexes */
44 static TCGv_env cpu_env;
45 static TCGv_ptr cpu_regwptr;
46 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
47 static TCGv_i32 cpu_cc_op;
48 static TCGv_i32 cpu_psr;
49 static TCGv cpu_fsr, cpu_pc, cpu_npc;
50 static TCGv cpu_regs[32];
51 static TCGv cpu_y;
52 #ifndef CONFIG_USER_ONLY
53 static TCGv cpu_tbr;
54 #endif
55 static TCGv cpu_cond;
56 #ifdef TARGET_SPARC64
57 static TCGv_i32 cpu_xcc, cpu_fprs;
58 static TCGv cpu_gsr;
59 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
60 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
61 #else
62 static TCGv cpu_wim;
63 #endif
64 /* Floating point registers */
65 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66 
67 #include "exec/gen-icount.h"
68 
/* Per-translation-block state for the SPARC instruction decoder.  */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;          /* nonzero once the TB has ended (branch/exception) */
    int mem_idx;
    bool fpu_enabled;
    bool address_mask_32bit;
    bool singlestep;    /* disables direct TB chaining (see use_goto_tb) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3];    /* scratch i32 temps handed out by get_temp_i32() */
    TCGv ttl[5];        /* scratch target_ulong temps from get_temp_tl() */
    int n_t32;          /* number of live entries in t32[] */
    int n_ttl;          /* number of live entries in ttl[] */
#ifdef TARGET_SPARC64
    int fprs_dirty;     /* FPRS dirty bits already set within this TB */
    int asi;
#endif
} DisasContext;
97 
/* A condition decomposed into a form directly usable with
   tcg_gen_brcond/movcond: "cond(c1, c2)".  */
typedef struct {
    TCGCond cond;
    bool is_bool;   /* c1 presumably already holds a 0/1 value -- confirm
                       at the use sites (outside this view) */
    bool g1, g2;    /* c1/c2 are TCG globals and must not be freed
                       (see free_compare) */
    TCGv c1, c2;
} DisasCompare;
104 
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the field extractors above.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* On sparc64 the MSB of a double/quad FP register number is encoded in
   bit 0 of the instruction field; fold it back into bit 5.  */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Trap-number masks (used by trap handling outside this view).  */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
126 
/* Sign-extend the low LEN bits of X (1 <= len <= 32).
 *
 * The previous implementation shifted a signed int left into (and
 * potentially through) the sign bit, which is undefined behavior in C;
 * it also relied on implementation-defined arithmetic right shift.
 * This version computes the same result using only well-defined
 * unsigned arithmetic: mask to LEN bits, then (v ^ m) - m flips the
 * field's sign bit down through the top bits.
 */
static int sign_extend(int x, int len)
{
    unsigned int m = 1u << (len - 1);
    unsigned int v = (unsigned int)x;

    if (len < 32) {
        v &= (1u << len) - 1;
    }
    /* Conversion of the out-of-range unsigned result back to int is
       two's-complement on all supported hosts.  */
    return (int)((v ^ m) - m);
}
132 
133 #define IS_IMM (insn & (1<<13))
134 
/* Allocate an i32 temporary and record it in the per-insn list so it
   can be freed in bulk after the instruction is translated.  */
static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}
142 
/* Allocate a target_ulong temporary, tracked like get_temp_i32().  */
static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}
150 
/* Mark the FPRS dirty bit for the half of the FP register file that
   contains RD: bit 1 for f0..f31, bit 2 for f32..f63.  */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
163 
164 /* floating point registers moves */
/* floating point registers moves */
/* Read single-precision register SRC.  Singles are packed in pairs
   into the 64-bit cpu_fpr[] array: the odd register is the low half,
   the even register the high half of cpu_fpr[src / 2].  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    /* On a 32-bit host an i64 is a register pair; alias the half.  */
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* Low half: reinterpret the i64 global as an i32.  */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* High half: shift it down into a fresh temporary.  */
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}
188 
/* Write V to single-precision register DST (see gen_load_fpr_F for the
   packing scheme) and mark the FPRS dirty bit.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* Deposit the 32-bit value into the proper half of the i64 pair.  */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
204 
/* Temporary destination for a single-precision result; the caller
   commits it with gen_store_fpr_F().  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
209 
210 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
211 {
212     src = DFPREG(src);
213     return cpu_fpr[src / 2];
214 }
215 
/* Write V to double-precision register DST and mark FPRS dirty.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
222 
/* Destination for a double-precision result: writes can go straight
   into the backing global, so no temporary is needed.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
227 
/* Copy quad FP register pair SRC into the env scratch slot qt0, used
   to pass 128-bit operands to helpers.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
235 
/* Same as gen_op_load_fpr_QT0, but fills the second scratch slot qt1.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
243 
/* Copy the 128-bit helper result from env scratch slot qt0 back into
   quad FP register pair DST.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
251 
/* Write the 128-bit value (V1 high, V2 low) to quad register DST and
   mark FPRS dirty.  */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
261 
262 #ifdef TARGET_SPARC64
263 static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
264 {
265     src = QFPREG(src);
266     return cpu_fpr[src / 2];
267 }
268 
269 static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
270 {
271     src = QFPREG(src);
272     return cpu_fpr[src / 2 + 1];
273 }
274 
/* Copy quad FP register RS to RD and mark FPRS dirty.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
284 #endif
285 
/* moves */
/* Privilege-level predicates.  User-mode emulation never runs
   privileged code, so these collapse to constants there.  */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
/* Hypervisor mode implies supervisor privilege.  */
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* True when addresses must be truncated to 32 bits: always under a
   32-bit ABI, otherwise when PSTATE.AM is in effect.  */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
308 
309 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
310 {
311 #ifdef TARGET_SPARC64
312     if (AM_CHECK(dc))
313         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
314 #endif
315 }
316 
/* Read general register REG.  %g0 reads as a fresh zero temporary so
   callers never observe (or clobber) a real global for it.  */
static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
{
    if (reg > 0) {
        assert(reg < 32);
        return cpu_regs[reg];
    } else {
        TCGv t = get_temp_tl(dc);
        tcg_gen_movi_tl(t, 0);
        return t;
    }
}
328 
/* Write V to general register REG; writes to %g0 are discarded.  */
static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
{
    if (reg > 0) {
        assert(reg < 32);
        tcg_gen_mov_tl(cpu_regs[reg], v);
    }
}
336 
337 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
338 {
339     if (reg > 0) {
340         assert(reg < 32);
341         return cpu_regs[reg];
342     } else {
343         return get_temp_tl(dc);
344     }
345 }
346 
/* Decide whether a direct (chained) TB jump to PC/NPC is allowed.
   Not allowed when single-stepping; in system mode both targets must
   lie in the same guest page as this TB.  */
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
361 
/* End the TB jumping to PC/NPC, chaining directly when permitted.
   TB_NUM (0 or 1) selects which of the two chain slots to use.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
378 
379 // XXX suboptimal
// XXX suboptimal
/* Extract the PSR negative (N) flag from SRC into REG as 0/1.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
385 
/* Extract the PSR zero (Z) flag from SRC into REG as 0/1.  */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
391 
/* Extract the PSR overflow (V) flag from SRC into REG as 0/1.  */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
397 
/* Extract the PSR carry (C) flag from SRC into REG as 0/1.  */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
403 
/* DST = SRC1 + SRC2, leaving the operands and result in the cc_*
   globals for lazy condition-code evaluation.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
411 
/* Recover the 32-bit carry produced by a previous add recorded in the
   cc_* globals.  Returns a new i32 temp holding 0/1; caller frees.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    /* Truncate the 64-bit globals to their low 32 bits first.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
437 
/* Recover the 32-bit borrow produced by a previous subtract recorded
   in the cc_* globals.  Returns a new i32 temp holding 0/1.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    /* Truncate the 64-bit globals to their low 32 bits first.  */
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
463 
/* ADDX/ADDXcc: DST = SRC1 + SRC2 + icc.C.  How the carry is obtained
   depends on which operation last set the flags (dc->cc_op); when
   UPDATE_CC is set the flags are left in CC_OP_ADDX form.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 carry to target width and add it in.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
534 
/* DST = SRC1 - SRC2, leaving the operands and result in the cc_*
   globals for lazy condition-code evaluation.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
542 
/* SUBX/SUBXcc: DST = SRC1 - SRC2 - icc.C.  Mirror image of
   gen_op_addx_int; flags left in CC_OP_SUBX form when UPDATE_CC.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 0/1 borrow to target width and subtract it.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
613 
/* One step of the SPARC V8 multiply-step instruction (MULScc):
   conditionally add SRC2 based on Y bit 0, shift Y and the partial
   product, and leave operands/result in the cc_* globals.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
655 
/* 32x32 -> 64 multiply: low 32 bits (or full 64-bit product on a
   64-bit target) to DST, high 32 bits to %y.  SIGN_EXT selects
   signed vs unsigned operands.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    /* On a 64-bit target, TCGv is i64: extend the 32-bit operands and
       do one 64-bit multiply.  */
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
683 
/* UMUL: unsigned 32x32 multiply (see gen_op_multiply).  */
static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
689 
/* SMUL: signed 32x32 multiply (see gen_op_multiply).  */
static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
695 
696 // 1
697 static inline void gen_op_eval_ba(TCGv dst)
698 {
699     tcg_gen_movi_tl(dst, 1);
700 }
701 
// Z: branch on equal
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
707 
// Z | (N ^ V): branch on less or equal (signed)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
719 
720 // N ^ V
721 static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
722 {
723     TCGv t0 = tcg_temp_new();
724     gen_mov_reg_V(t0, src);
725     gen_mov_reg_N(dst, src);
726     tcg_gen_xor_tl(dst, dst, t0);
727     tcg_temp_free(t0);
728 }
729 
730 // C | Z
731 static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
732 {
733     TCGv t0 = tcg_temp_new();
734     gen_mov_reg_Z(t0, src);
735     gen_mov_reg_C(dst, src);
736     tcg_gen_or_tl(dst, dst, t0);
737     tcg_temp_free(t0);
738 }
739 
// C: branch on carry set (unsigned less)
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
745 
// V: branch on overflow set
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
751 
752 // 0
753 static inline void gen_op_eval_bn(TCGv dst)
754 {
755     tcg_gen_movi_tl(dst, 0);
756 }
757 
// N: branch on negative
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
763 
// !Z: branch on not equal
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
770 
// !(Z | (N ^ V)): branch on greater (signed); negation of ble
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
777 
// !(N ^ V): branch on greater or equal (signed); negation of bl
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
784 
// !(C | Z): branch on greater (unsigned); negation of bleu
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
791 
// !C: branch on carry clear (unsigned greater or equal)
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
798 
// !N: branch on positive
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
805 
// !V: branch on overflow clear
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
812 
813 /*
814   FPSR bit field FCC1 | FCC0:
815    0 =
816    1 <
817    2 >
818    3 unordered
819 */
/* Extract bit 0 of the FCC field at FCC_OFFSET within the FSR value
   SRC into REG (0/1).  */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
826 
/* Extract bit 1 of the FCC field at FCC_OFFSET within the FSR value
   SRC into REG (0/1).  */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
833 
834 // !0: FCC0 | FCC1
835 static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
836                                     unsigned int fcc_offset)
837 {
838     TCGv t0 = tcg_temp_new();
839     gen_mov_reg_FCC0(dst, src, fcc_offset);
840     gen_mov_reg_FCC1(t0, src, fcc_offset);
841     tcg_gen_or_tl(dst, dst, t0);
842     tcg_temp_free(t0);
843 }
844 
845 // 1 or 2: FCC0 ^ FCC1
846 static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
847                                     unsigned int fcc_offset)
848 {
849     TCGv t0 = tcg_temp_new();
850     gen_mov_reg_FCC0(dst, src, fcc_offset);
851     gen_mov_reg_FCC1(t0, src, fcc_offset);
852     tcg_gen_xor_tl(dst, dst, t0);
853     tcg_temp_free(t0);
854 }
855 
// 1 or 3: FCC0 (less or unordered)
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
862 
// 1: FCC0 & !FCC1 (less)
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
873 
// 2 or 3: FCC1 (greater or unordered)
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
880 
// 2: !FCC0 & FCC1 (greater)
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}
891 
892 // 3: FCC0 & FCC1
893 static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
894                                     unsigned int fcc_offset)
895 {
896     TCGv t0 = tcg_temp_new();
897     gen_mov_reg_FCC0(dst, src, fcc_offset);
898     gen_mov_reg_FCC1(t0, src, fcc_offset);
899     tcg_gen_and_tl(dst, dst, t0);
900     tcg_temp_free(t0);
901 }
902 
// 0: !(FCC0 | FCC1) (equal)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
914 
// 0 or 3: !(FCC0 ^ FCC1) (equal or unordered)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
926 
// 0 or 2: !FCC0 (greater or equal)
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
934 
// !1: !(FCC0 & !FCC1) (greater, equal, or unordered)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
946 
// 0 or 1: !FCC1 (less or equal)
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
954 
// !2: !(!FCC0 & FCC1) (less, equal, or unordered)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
966 
// !3: !(FCC0 & FCC1) (ordered)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
978 
/* End the TB with a two-way conditional exit: go to PC1 when R_COND is
   nonzero, otherwise to PC2.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
991 
/* Conditional branch with the annul bit set: when taken, execute the
   delay slot (npc) then jump to PC1; when not taken, skip the delay
   slot entirely (npc + 4).  Ends the TB.  */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}
1006 
/* Conditional branch without annul: the delay slot always executes,
   and the npc after it depends on the condition.  When npc is known
   statically, defer the decision via the JUMP_PC mechanism; otherwise
   select the new npc with a movcond now.  */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;       /* npc if the branch is taken */
        dc->jump_pc[1] = npc + 4;   /* npc if it falls through */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1031 
/* Materialize a pending JUMP_PC: select cpu_npc from jump_pc[0]/[1]
   according to the saved condition in cpu_cond.  */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1044 
1045 /* call this function before using the condition register as it may
1046    have been set for a jump */
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1054 
/* Make cpu_npc match dc->npc: resolve a pending JUMP_PC, or store a
   static npc; a DYNAMIC_PC npc is already up to date.  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1064 
/* Force lazily-evaluated condition codes into cpu_psr so the flags can
   be read directly.  */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1072 
/* Synchronize cpu_pc and cpu_npc with the translator's view, e.g.
   before anything that can raise an exception.  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1078 
/* Raise guest exception WHICH at the current pc/npc and end the TB.  */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->is_br = 1;
}
1089 
/* Emit a runtime alignment check of ADDR against MASK; the helper
   raises the alignment trap on failure.  */
static void gen_check_align(TCGv addr, int mask)
{
    TCGv_i32 r_mask = tcg_const_i32(mask);
    gen_helper_check_align(cpu_env, addr, r_mask);
    tcg_temp_free_i32(r_mask);
}
1096 
1097 static inline void gen_mov_pc_npc(DisasContext *dc)
1098 {
1099     if (dc->npc == JUMP_PC) {
1100         gen_generic_branch(dc);
1101         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1102         dc->pc = DYNAMIC_PC;
1103     } else if (dc->npc == DYNAMIC_PC) {
1104         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1105         dc->pc = DYNAMIC_PC;
1106     } else {
1107         dc->pc = dc->npc;
1108     }
1109 }
1110 
/* Emit the sequential advance: pc = npc, npc += 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1116 
/* Release the comparison operands; g1/g2 flag operands that are TCG
   globals and therefore must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1126 
1127 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1128                         DisasContext *dc)
1129 {
1130     static int subcc_cond[16] = {
1131         TCG_COND_NEVER,
1132         TCG_COND_EQ,
1133         TCG_COND_LE,
1134         TCG_COND_LT,
1135         TCG_COND_LEU,
1136         TCG_COND_LTU,
1137         -1, /* neg */
1138         -1, /* overflow */
1139         TCG_COND_ALWAYS,
1140         TCG_COND_NE,
1141         TCG_COND_GT,
1142         TCG_COND_GE,
1143         TCG_COND_GTU,
1144         TCG_COND_GEU,
1145         -1, /* pos */
1146         -1, /* no overflow */
1147     };
1148 
1149     static int logic_cond[16] = {
1150         TCG_COND_NEVER,
1151         TCG_COND_EQ,     /* eq:  Z */
1152         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1153         TCG_COND_LT,     /* lt:  N ^ V -> N */
1154         TCG_COND_EQ,     /* leu: C | Z -> Z */
1155         TCG_COND_NEVER,  /* ltu: C -> 0 */
1156         TCG_COND_LT,     /* neg: N */
1157         TCG_COND_NEVER,  /* vs:  V -> 0 */
1158         TCG_COND_ALWAYS,
1159         TCG_COND_NE,     /* ne:  !Z */
1160         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1161         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1162         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1163         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1164         TCG_COND_GE,     /* pos: !N */
1165         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1166     };
1167 
1168     TCGv_i32 r_src;
1169     TCGv r_dst;
1170 
1171 #ifdef TARGET_SPARC64
1172     if (xcc) {
1173         r_src = cpu_xcc;
1174     } else {
1175         r_src = cpu_psr;
1176     }
1177 #else
1178     r_src = cpu_psr;
1179 #endif
1180 
1181     switch (dc->cc_op) {
1182     case CC_OP_LOGIC:
1183         cmp->cond = logic_cond[cond];
1184     do_compare_dst_0:
1185         cmp->is_bool = false;
1186         cmp->g2 = false;
1187         cmp->c2 = tcg_const_tl(0);
1188 #ifdef TARGET_SPARC64
1189         if (!xcc) {
1190             cmp->g1 = false;
1191             cmp->c1 = tcg_temp_new();
1192             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1193             break;
1194         }
1195 #endif
1196         cmp->g1 = true;
1197         cmp->c1 = cpu_cc_dst;
1198         break;
1199 
1200     case CC_OP_SUB:
1201         switch (cond) {
1202         case 6:  /* neg */
1203         case 14: /* pos */
1204             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1205             goto do_compare_dst_0;
1206 
1207         case 7: /* overflow */
1208         case 15: /* !overflow */
1209             goto do_dynamic;
1210 
1211         default:
1212             cmp->cond = subcc_cond[cond];
1213             cmp->is_bool = false;
1214 #ifdef TARGET_SPARC64
1215             if (!xcc) {
1216                 /* Note that sign-extension works for unsigned compares as
1217                    long as both operands are sign-extended.  */
1218                 cmp->g1 = cmp->g2 = false;
1219                 cmp->c1 = tcg_temp_new();
1220                 cmp->c2 = tcg_temp_new();
1221                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1222                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1223                 break;
1224             }
1225 #endif
1226             cmp->g1 = cmp->g2 = true;
1227             cmp->c1 = cpu_cc_src;
1228             cmp->c2 = cpu_cc_src2;
1229             break;
1230         }
1231         break;
1232 
1233     default:
1234     do_dynamic:
1235         gen_helper_compute_psr(cpu_env);
1236         dc->cc_op = CC_OP_FLAGS;
1237         /* FALLTHRU */
1238 
1239     case CC_OP_FLAGS:
1240         /* We're going to generate a boolean result.  */
1241         cmp->cond = TCG_COND_NE;
1242         cmp->is_bool = true;
1243         cmp->g1 = cmp->g2 = false;
1244         cmp->c1 = r_dst = tcg_temp_new();
1245         cmp->c2 = tcg_const_tl(0);
1246 
1247         switch (cond) {
1248         case 0x0:
1249             gen_op_eval_bn(r_dst);
1250             break;
1251         case 0x1:
1252             gen_op_eval_be(r_dst, r_src);
1253             break;
1254         case 0x2:
1255             gen_op_eval_ble(r_dst, r_src);
1256             break;
1257         case 0x3:
1258             gen_op_eval_bl(r_dst, r_src);
1259             break;
1260         case 0x4:
1261             gen_op_eval_bleu(r_dst, r_src);
1262             break;
1263         case 0x5:
1264             gen_op_eval_bcs(r_dst, r_src);
1265             break;
1266         case 0x6:
1267             gen_op_eval_bneg(r_dst, r_src);
1268             break;
1269         case 0x7:
1270             gen_op_eval_bvs(r_dst, r_src);
1271             break;
1272         case 0x8:
1273             gen_op_eval_ba(r_dst);
1274             break;
1275         case 0x9:
1276             gen_op_eval_bne(r_dst, r_src);
1277             break;
1278         case 0xa:
1279             gen_op_eval_bg(r_dst, r_src);
1280             break;
1281         case 0xb:
1282             gen_op_eval_bge(r_dst, r_src);
1283             break;
1284         case 0xc:
1285             gen_op_eval_bgu(r_dst, r_src);
1286             break;
1287         case 0xd:
1288             gen_op_eval_bcc(r_dst, r_src);
1289             break;
1290         case 0xe:
1291             gen_op_eval_bpos(r_dst, r_src);
1292             break;
1293         case 0xf:
1294             gen_op_eval_bvc(r_dst, r_src);
1295             break;
1296         }
1297         break;
1298     }
1299 }
1300 
/* Fill in CMP for the FP condition COND on condition-code set CC
   (fcc0..fcc3).  The condition is always evaluated to a boolean in a
   fresh temporary.  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offset of the selected fcc field relative to fcc0 in the FSR.
       NOTE(review): values presumably match the FSR layout expected by
       the gen_op_eval_fb* helpers — confirm against those helpers.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1380 
/* Evaluate integer condition COND into the boolean R_DST.  */
static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
                     DisasContext *dc)
{
    DisasCompare cmp;
    gen_compare(&cmp, cc, cond, dc);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
1396 
/* Evaluate FP condition COND on fcc set CC into the boolean R_DST.  */
static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
{
    DisasCompare cmp;
    gen_fcompare(&cmp, cc, cond);

    /* The interface is to return a boolean in r_dst.  */
    if (cmp.is_bool) {
        tcg_gen_mov_tl(r_dst, cmp.c1);
    } else {
        tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
    }

    free_compare(&cmp);
}
1411 
1412 #ifdef TARGET_SPARC64
/* TCG condition for each register-branch (BPr) condition code, stored
   inverted: gen_compare_reg un-inverts via tcg_invert_cond.  Entries
   0 and 4 (-1) are reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1424 
/* Fill in CMP for a register-vs-zero (BPr/MOVr) condition.  R_SRC is
   a global register temp, so it is marked g1 and not freed.  */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1434 
/* Evaluate register condition COND on R_SRC into the boolean R_DST.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1445 #endif
1446 
/* Translate an integer conditional branch (Bicc/BPcc).  OFFSET is the
   sign-extended displacement from the branch pc; CC selects icc/xcc.
   'a' is the annul bit (insn bit 29).  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With 32-bit address masking active, branch targets are truncated.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay slot is skipped.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: jump directly, skipping the delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* Keep cpu_pc in sync in case npc was dynamic.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: materialize the condition and emit the branch.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1486 
/* Translate an FP conditional branch (FBfcc/FBPfcc).  Same structure
   as do_branch, but the condition comes from fcc set CC.  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    /* With 32-bit address masking active, branch targets are truncated.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: the delay slot is skipped.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: jump directly, skipping the delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* Keep cpu_pc in sync in case npc was dynamic.  */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: materialize the FP condition and branch.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1526 
1527 #ifdef TARGET_SPARC64
/* Translate a branch-on-register-condition (BPr).  These are always
   conditional; R_REG is compared against zero.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* With 32-bit address masking active, branch targets are truncated.  */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1545 
/* FCMPs: single-precision compare, result into fcc[FCCNO] of the FSR.  */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1563 
/* FCMPd: double-precision compare, result into fcc[FCCNO] of the FSR.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1581 
/* FCMPq: quad-precision compare of QT0/QT1, result into fcc[FCCNO].  */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1599 
/* FCMPEs: single-precision compare, signalling on unordered operands.  */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1617 
/* FCMPEd: double-precision compare, signalling on unordered operands.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1635 
/* FCMPEq: quad-precision compare of QT0/QT1, signalling on unordered.  */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1653 
1654 #else
1655 
/* Pre-v9 has only fcc0, so FCCNO is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1660 
/* Pre-v9 has only fcc0, so FCCNO is ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1665 
/* Pre-v9 has only fcc0, so FCCNO is ignored; operands are QT0/QT1.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1670 
/* Pre-v9 has only fcc0, so FCCNO is ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1675 
/* Pre-v9 has only fcc0, so FCCNO is ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1680 
/* Pre-v9 has only fcc0, so FCCNO is ignored; operands are QT0/QT1.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1685 #endif
1686 
/* Set the FSR FTT field to FSR_FLAGS and raise an FP exception trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1693 
/* If the FPU is disabled, raise the fp-disabled trap and return 1 so
   the caller can abort translation of the insn.  In user mode the FPU
   is always enabled and this returns 0.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1704 
/* Clear the FTT and current-exception (cexc) fields of the FSR.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1709 
/* frd = gen(frs), single-precision unary op via an FPU helper; the
   accrued IEEE exceptions are checked after the op.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1723 
/* frd = gen(frs), single-precision unary op that cannot raise IEEE
   exceptions ("ne" = no exception check).  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
1736 
/* frd = gen(frs1, frs2), single-precision binary op with IEEE
   exception check.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1751 
1752 #ifdef TARGET_SPARC64
/* frd = gen(frs1, frs2), single-precision binary op, no exception
   check (used by VIS-style ops).  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1766 #endif
1767 
/* frd = gen(frs), double-precision unary op with IEEE exception
   check.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1781 
1782 #ifdef TARGET_SPARC64
/* frd = gen(frs), double-precision unary op, no exception check.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1795 #endif
1796 
/* frd = gen(frs1, frs2), double-precision binary op with IEEE
   exception check.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1811 
1812 #ifdef TARGET_SPARC64
/* frd = gen(frs1, frs2), double-precision binary op, no exception
   check.  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1826 
/* frd = gen(gsr, frs1, frs2): double-precision binary op that also
   takes the GSR as an input (e.g. VIS faligndata-style ops).  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1840 
/* frd = gen(frd, frs1, frs2): double-precision op that also reads the
   old destination register (src0), no exception check.  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1855 #endif
1856 
/* Quad-precision unary op: operand staged in QT1, result taken from
   QT0, with IEEE exception check.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1868 
1869 #ifdef TARGET_SPARC64
/* Quad-precision unary op (QT1 -> QT0), no exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1880 #endif
1881 
/* Quad-precision binary op: operands staged in QT0/QT1, result taken
   from QT0, with IEEE exception check.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1894 
/* Double result from two single-precision operands (e.g. fsmuld),
   with IEEE exception check.  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1910 
/* Quad result (via QT0) from two double-precision operands, with IEEE
   exception check.  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1925 
1926 #ifdef TARGET_SPARC64
/* Double result from one single-precision operand, with IEEE
   exception check.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1941 #endif
1942 
/* Double result from one single-precision operand, no exception check
   (the helper still takes cpu_env).  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1956 
/* Single result from one double-precision operand, with IEEE
   exception check.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1971 
/* Single result from a quad-precision operand staged in QT1, with
   IEEE exception check.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1985 
/* Double result from a quad-precision operand staged in QT1, with
   IEEE exception check.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1999 
/* Quad result (via QT0) from one single-precision operand, no
   exception check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2012 
/* Quad result (via QT0) from one double-precision operand, no
   exception check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2025 
/* SWAP: atomically exchange SRC with memory at ADDR, old value into
   DST.  The address is masked first (32-bit mode on sparc64).  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2032 
/* LDSTUB: atomically read the byte at ADDR into DST and store 0xff.  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
    tcg_temp_free(m1);
}
2040 
2041 /* asi moves */
2042 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Strategy for implementing an ASI access, as decoded by get_asi().  */
typedef enum {
    GET_ASI_HELPER,   /* default: defer to an out-of-line helper */
    GET_ASI_EXCP,     /* an exception was already generated while decoding */
    GET_ASI_DIRECT,   /* plain access using mem_idx/memop */
    GET_ASI_DTWINX,   /* NOTE(review): twin-doubleword access — confirm use */
    GET_ASI_BLOCK,    /* NOTE(review): block access — confirm use */
    GET_ASI_SHORT,    /* NOTE(review): short FP access — confirm use */
    GET_ASI_BCOPY,    /* ASI_M_BCOPY block copy */
    GET_ASI_BFILL,    /* ASI_M_BFILL block fill */
} ASIType;

/* Decoded ASI: implementation strategy, raw ASI number, MMU index and
   memory operation (size/endianness).  */
typedef struct {
    ASIType type;
    int asi;
    int mem_idx;
    TCGMemOp memop;
} DisasASI;
2060 
2061 static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
2062 {
2063     int asi = GET_FIELD(insn, 19, 26);
2064     ASIType type = GET_ASI_HELPER;
2065     int mem_idx = dc->mem_idx;
2066 
2067 #ifndef TARGET_SPARC64
2068     /* Before v9, all asis are immediate and privileged.  */
2069     if (IS_IMM) {
2070         gen_exception(dc, TT_ILL_INSN);
2071         type = GET_ASI_EXCP;
2072     } else if (supervisor(dc)
2073                /* Note that LEON accepts ASI_USERDATA in user mode, for
2074                   use with CASA.  Also note that previous versions of
2075                   QEMU allowed (and old versions of gcc emitted) ASI_P
2076                   for LEON, which is incorrect.  */
2077                || (asi == ASI_USERDATA
2078                    && (dc->def->features & CPU_FEATURE_CASA))) {
2079         switch (asi) {
2080         case ASI_USERDATA:   /* User data access */
2081             mem_idx = MMU_USER_IDX;
2082             type = GET_ASI_DIRECT;
2083             break;
2084         case ASI_KERNELDATA: /* Supervisor data access */
2085             mem_idx = MMU_KERNEL_IDX;
2086             type = GET_ASI_DIRECT;
2087             break;
2088         case ASI_M_BYPASS:    /* MMU passthrough */
2089         case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2090             mem_idx = MMU_PHYS_IDX;
2091             type = GET_ASI_DIRECT;
2092             break;
2093         case ASI_M_BCOPY: /* Block copy, sta access */
2094             mem_idx = MMU_KERNEL_IDX;
2095             type = GET_ASI_BCOPY;
2096             break;
2097         case ASI_M_BFILL: /* Block fill, stda access */
2098             mem_idx = MMU_KERNEL_IDX;
2099             type = GET_ASI_BFILL;
2100             break;
2101         }
2102     } else {
2103         gen_exception(dc, TT_PRIV_INSN);
2104         type = GET_ASI_EXCP;
2105     }
2106 #else
2107     if (IS_IMM) {
2108         asi = dc->asi;
2109     }
2110     /* With v9, all asis below 0x80 are privileged.  */
2111     /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2112        down that bit into DisasContext.  For the moment that's ok,
2113        since the direct implementations below doesn't have any ASIs
2114        in the restricted [0x30, 0x7f] range, and the check will be
2115        done properly in the helper.  */
2116     if (!supervisor(dc) && asi < 0x80) {
2117         gen_exception(dc, TT_PRIV_ACT);
2118         type = GET_ASI_EXCP;
2119     } else {
2120         switch (asi) {
2121         case ASI_REAL:      /* Bypass */
2122         case ASI_REAL_IO:   /* Bypass, non-cacheable */
2123         case ASI_REAL_L:    /* Bypass LE */
2124         case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2125         case ASI_TWINX_REAL:   /* Real address, twinx */
2126         case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2127         case ASI_QUAD_LDD_PHYS:
2128         case ASI_QUAD_LDD_PHYS_L:
2129             mem_idx = MMU_PHYS_IDX;
2130             break;
2131         case ASI_N:  /* Nucleus */
2132         case ASI_NL: /* Nucleus LE */
2133         case ASI_TWINX_N:
2134         case ASI_TWINX_NL:
2135         case ASI_NUCLEUS_QUAD_LDD:
2136         case ASI_NUCLEUS_QUAD_LDD_L:
2137             if (hypervisor(dc)) {
2138                 mem_idx = MMU_PHYS_IDX;
2139             } else {
2140                 mem_idx = MMU_NUCLEUS_IDX;
2141             }
2142             break;
2143         case ASI_AIUP:  /* As if user primary */
2144         case ASI_AIUPL: /* As if user primary LE */
2145         case ASI_TWINX_AIUP:
2146         case ASI_TWINX_AIUP_L:
2147         case ASI_BLK_AIUP_4V:
2148         case ASI_BLK_AIUP_L_4V:
2149         case ASI_BLK_AIUP:
2150         case ASI_BLK_AIUPL:
2151             mem_idx = MMU_USER_IDX;
2152             break;
2153         case ASI_AIUS:  /* As if user secondary */
2154         case ASI_AIUSL: /* As if user secondary LE */
2155         case ASI_TWINX_AIUS:
2156         case ASI_TWINX_AIUS_L:
2157         case ASI_BLK_AIUS_4V:
2158         case ASI_BLK_AIUS_L_4V:
2159         case ASI_BLK_AIUS:
2160         case ASI_BLK_AIUSL:
2161             mem_idx = MMU_USER_SECONDARY_IDX;
2162             break;
2163         case ASI_S:  /* Secondary */
2164         case ASI_SL: /* Secondary LE */
2165         case ASI_TWINX_S:
2166         case ASI_TWINX_SL:
2167         case ASI_BLK_COMMIT_S:
2168         case ASI_BLK_S:
2169         case ASI_BLK_SL:
2170         case ASI_FL8_S:
2171         case ASI_FL8_SL:
2172         case ASI_FL16_S:
2173         case ASI_FL16_SL:
2174             if (mem_idx == MMU_USER_IDX) {
2175                 mem_idx = MMU_USER_SECONDARY_IDX;
2176             } else if (mem_idx == MMU_KERNEL_IDX) {
2177                 mem_idx = MMU_KERNEL_SECONDARY_IDX;
2178             }
2179             break;
2180         case ASI_P:  /* Primary */
2181         case ASI_PL: /* Primary LE */
2182         case ASI_TWINX_P:
2183         case ASI_TWINX_PL:
2184         case ASI_BLK_COMMIT_P:
2185         case ASI_BLK_P:
2186         case ASI_BLK_PL:
2187         case ASI_FL8_P:
2188         case ASI_FL8_PL:
2189         case ASI_FL16_P:
2190         case ASI_FL16_PL:
2191             break;
2192         }
2193         switch (asi) {
2194         case ASI_REAL:
2195         case ASI_REAL_IO:
2196         case ASI_REAL_L:
2197         case ASI_REAL_IO_L:
2198         case ASI_N:
2199         case ASI_NL:
2200         case ASI_AIUP:
2201         case ASI_AIUPL:
2202         case ASI_AIUS:
2203         case ASI_AIUSL:
2204         case ASI_S:
2205         case ASI_SL:
2206         case ASI_P:
2207         case ASI_PL:
2208             type = GET_ASI_DIRECT;
2209             break;
2210         case ASI_TWINX_REAL:
2211         case ASI_TWINX_REAL_L:
2212         case ASI_TWINX_N:
2213         case ASI_TWINX_NL:
2214         case ASI_TWINX_AIUP:
2215         case ASI_TWINX_AIUP_L:
2216         case ASI_TWINX_AIUS:
2217         case ASI_TWINX_AIUS_L:
2218         case ASI_TWINX_P:
2219         case ASI_TWINX_PL:
2220         case ASI_TWINX_S:
2221         case ASI_TWINX_SL:
2222         case ASI_QUAD_LDD_PHYS:
2223         case ASI_QUAD_LDD_PHYS_L:
2224         case ASI_NUCLEUS_QUAD_LDD:
2225         case ASI_NUCLEUS_QUAD_LDD_L:
2226             type = GET_ASI_DTWINX;
2227             break;
2228         case ASI_BLK_COMMIT_P:
2229         case ASI_BLK_COMMIT_S:
2230         case ASI_BLK_AIUP_4V:
2231         case ASI_BLK_AIUP_L_4V:
2232         case ASI_BLK_AIUP:
2233         case ASI_BLK_AIUPL:
2234         case ASI_BLK_AIUS_4V:
2235         case ASI_BLK_AIUS_L_4V:
2236         case ASI_BLK_AIUS:
2237         case ASI_BLK_AIUSL:
2238         case ASI_BLK_S:
2239         case ASI_BLK_SL:
2240         case ASI_BLK_P:
2241         case ASI_BLK_PL:
2242             type = GET_ASI_BLOCK;
2243             break;
2244         case ASI_FL8_S:
2245         case ASI_FL8_SL:
2246         case ASI_FL8_P:
2247         case ASI_FL8_PL:
2248             memop = MO_UB;
2249             type = GET_ASI_SHORT;
2250             break;
2251         case ASI_FL16_S:
2252         case ASI_FL16_SL:
2253         case ASI_FL16_P:
2254         case ASI_FL16_PL:
2255             memop = MO_TEUW;
2256             type = GET_ASI_SHORT;
2257             break;
2258         }
2259         /* The little-endian asis all have bit 3 set.  */
2260         if (asi & 8) {
2261             memop ^= MO_BSWAP;
2262         }
2263     }
2264 #endif
2265 
2266     return (DisasASI){ type, asi, mem_idx, memop };
2267 }
2268 
/* Generate code for an alternate-space integer load (lda/lduba/...).
   DST receives the loaded value; ADDR is the virtual address; INSN is
   the raw instruction word (used to extract the ASI); MEMOP gives the
   access size and endianness.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps onto a normal softmmu index; inline the load.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Fall back to the out-of-line helper for unusual ASIs.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to target width.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2306 
/* Generate code for an alternate-space integer store (sta/stba/...).
   SRC holds the value to store; ADDR is the virtual address; INSN the
   raw instruction word; MEMOP the access size and endianness.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised the exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
        /* fall through */
#endif
    case GET_ASI_DIRECT:
        /* The ASI maps onto a normal softmmu index; inline the store.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        /* Fall back to the out-of-line helper for unusual ASIs.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen SRC first.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2390 
2391 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2392                          TCGv addr, int insn)
2393 {
2394     DisasASI da = get_asi(dc, insn, MO_TEUL);
2395 
2396     switch (da.type) {
2397     case GET_ASI_EXCP:
2398         break;
2399     case GET_ASI_DIRECT:
2400         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2401         break;
2402     default:
2403         /* ??? Should be DAE_invalid_asi.  */
2404         gen_exception(dc, TT_DATA_ACCESS);
2405         break;
2406     }
2407 }
2408 
2409 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2410                         int insn, int rd)
2411 {
2412     DisasASI da = get_asi(dc, insn, MO_TEUL);
2413     TCGv oldv;
2414 
2415     switch (da.type) {
2416     case GET_ASI_EXCP:
2417         return;
2418     case GET_ASI_DIRECT:
2419         oldv = tcg_temp_new();
2420         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2421                                   da.mem_idx, da.memop);
2422         gen_store_gpr(dc, rd, oldv);
2423         tcg_temp_free(oldv);
2424         break;
2425     default:
2426         /* ??? Should be DAE_invalid_asi.  */
2427         gen_exception(dc, TT_DATA_ACCESS);
2428         break;
2429     }
2430 }
2431 
/* Generate code for ldstuba: atomically load the byte at ADDR into
   DST and set the memory location to 0xff.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (parallel_cpus) {
            /* The helper-based ld+st pair below is not atomic against
               other vcpus; bail out so it runs serialized.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            /* Store the 0xff "locked" marker back to the same byte.  */
            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2471 #endif
2472 
2473 #ifdef TARGET_SPARC64
/* Generate code for a floating-point load from an alternate address
   space (ldfa/lddfa/ldqfa).  SIZE is the access width in bytes
   (4, 8 or 16); RD is the destination fp register number.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Buffer the first half in a temp so that a fault on the
               second load leaves cpu_fpr[rd / 2] unmodified.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* 64-byte block load: eight consecutive doublewords into
               the aligned group of fp registers starting at RD.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in the DIRECT case, hold the first half in a temp
                   until the second helper call has succeeded.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2587 
/* Generate code for a floating-point store to an alternate address
   space (stfa/stdfa/stqfa).  SIZE is the access width in bytes
   (4, 8 or 16); RD is the source fp register number.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* 64-byte block store: eight consecutive doublewords from
               the aligned group of fp registers starting at RD.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2670 
/* Generate code for sparc64 ldda and the twin-load ASIs: load a
   64-bit doubleword whose 32-bit halves go into the even/odd register
   pair RD and RD+1, or two full 64-bit words for the TWINX/QUAD ASIs.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        return;

    case GET_ASI_DTWINX:
        /* 128-bit load: two 64-bit words, with the 16-byte alignment
           check applied to the first access only.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2736 
/* Generate code for sparc64 stda and the twin-store ASIs: store the
   even/odd register pair HI (RD) and RD+1 as one 64-bit doubleword,
   or as two full 64-bit words for the TWINX ASIs.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;

    case GET_ASI_DTWINX:
        /* 128-bit store: the 16-byte alignment check is applied to
           the first access only.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2796 
2797 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2798                          int insn, int rd)
2799 {
2800     DisasASI da = get_asi(dc, insn, MO_TEQ);
2801     TCGv oldv;
2802 
2803     switch (da.type) {
2804     case GET_ASI_EXCP:
2805         return;
2806     case GET_ASI_DIRECT:
2807         oldv = tcg_temp_new();
2808         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2809                                   da.mem_idx, da.memop);
2810         gen_store_gpr(dc, rd, oldv);
2811         tcg_temp_free(oldv);
2812         break;
2813     default:
2814         /* ??? Should be DAE_invalid_asi.  */
2815         gen_exception(dc, TT_DATA_ACCESS);
2816         break;
2817     }
2818 }
2819 
2820 #elif !defined(CONFIG_USER_ONLY)
/* Generate code for sparc32 ldda: load a 64-bit doubleword and split
   it into the even/odd register pair RD and RD+1.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Fall back to the out-of-line helper for unusual ASIs.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* Split the 64-bit result into the two 32-bit destinations.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2858 
/* Generate code for sparc32 stda: store the even/odd register pair
   HI (RD) and RD+1 as a single 64-bit doubleword.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Combine the pair into one 64-bit value up front.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        /* Fall back to the out-of-line helper for unusual ASIs.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2911 #endif
2912 
2913 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2914 {
2915     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2916     return gen_load_gpr(dc, rs1);
2917 }
2918 
2919 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2920 {
2921     if (IS_IMM) { /* immediate */
2922         target_long simm = GET_FIELDs(insn, 19, 31);
2923         TCGv t = get_temp_tl(dc);
2924         tcg_gen_movi_tl(t, simm);
2925         return t;
2926     } else {      /* register */
2927         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2928         return gen_load_gpr(dc, rs2);
2929     }
2930 }
2931 
2932 #ifdef TARGET_SPARC64
/* Conditional move of a single-precision fp register (fmovs):
   rd = (CMP holds ? rs : rd).  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* The comparison is already a 0/1 value in c1; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Evaluate the comparison at 64 bits, then narrow.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0 ? rs : rd).  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2961 
2962 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2963 {
2964     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2965     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2966                         gen_load_fpr_D(dc, rs),
2967                         gen_load_fpr_D(dc, rd));
2968     gen_store_fpr_D(dc, rd, dst);
2969 }
2970 
2971 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2972 {
2973     int qd = QFPREG(rd);
2974     int qs = QFPREG(rs);
2975 
2976     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2977                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2978     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2979                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2980 
2981     gen_update_fprs_dirty(dc, qd);
2982 }
2983 
2984 #ifndef CONFIG_USER_ONLY
/* Set R_TSPTR to point at the trap_state entry for the current trap
   level, i.e. &env->ts[env->tl & MAXTL_MASK].  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
3009 #endif
3010 
3011 static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
3012                      int width, bool cc, bool left)
3013 {
3014     TCGv lo1, lo2, t1, t2;
3015     uint64_t amask, tabl, tabr;
3016     int shift, imask, omask;
3017 
3018     if (cc) {
3019         tcg_gen_mov_tl(cpu_cc_src, s1);
3020         tcg_gen_mov_tl(cpu_cc_src2, s2);
3021         tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
3022         tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3023         dc->cc_op = CC_OP_SUB;
3024     }
3025 
3026     /* Theory of operation: there are two tables, left and right (not to
3027        be confused with the left and right versions of the opcode).  These
3028        are indexed by the low 3 bits of the inputs.  To make things "easy",
3029        these tables are loaded into two constants, TABL and TABR below.
3030        The operation index = (input & imask) << shift calculates the index
3031        into the constant, while val = (table >> index) & omask calculates
3032        the value we're looking for.  */
3033     switch (width) {
3034     case 8:
3035         imask = 0x7;
3036         shift = 3;
3037         omask = 0xff;
3038         if (left) {
3039             tabl = 0x80c0e0f0f8fcfeffULL;
3040             tabr = 0xff7f3f1f0f070301ULL;
3041         } else {
3042             tabl = 0x0103070f1f3f7fffULL;
3043             tabr = 0xfffefcf8f0e0c080ULL;
3044         }
3045         break;
3046     case 16:
3047         imask = 0x6;
3048         shift = 1;
3049         omask = 0xf;
3050         if (left) {
3051             tabl = 0x8cef;
3052             tabr = 0xf731;
3053         } else {
3054             tabl = 0x137f;
3055             tabr = 0xfec8;
3056         }
3057         break;
3058     case 32:
3059         imask = 0x4;
3060         shift = 0;
3061         omask = 0x3;
3062         if (left) {
3063             tabl = (2 << 2) | 3;
3064             tabr = (3 << 2) | 1;
3065         } else {
3066             tabl = (1 << 2) | 3;
3067             tabr = (3 << 2) | 2;
3068         }
3069         break;
3070     default:
3071         abort();
3072     }
3073 
3074     lo1 = tcg_temp_new();
3075     lo2 = tcg_temp_new();
3076     tcg_gen_andi_tl(lo1, s1, imask);
3077     tcg_gen_andi_tl(lo2, s2, imask);
3078     tcg_gen_shli_tl(lo1, lo1, shift);
3079     tcg_gen_shli_tl(lo2, lo2, shift);
3080 
3081     t1 = tcg_const_tl(tabl);
3082     t2 = tcg_const_tl(tabr);
3083     tcg_gen_shr_tl(lo1, t1, lo1);
3084     tcg_gen_shr_tl(lo2, t2, lo2);
3085     tcg_gen_andi_tl(dst, lo1, omask);
3086     tcg_gen_andi_tl(lo2, lo2, omask);
3087 
3088     amask = -8;
3089     if (AM_CHECK(dc)) {
3090         amask &= 0xffffffffULL;
3091     }
3092     tcg_gen_andi_tl(s1, s1, amask);
3093     tcg_gen_andi_tl(s2, s2, amask);
3094 
3095     /* We want to compute
3096         dst = (s1 == s2 ? lo1 : lo1 & lo2).
3097        We've already done dst = lo1, so this reduces to
3098         dst &= (s1 == s2 ? -1 : lo2)
3099        Which we perform by
3100         lo2 |= -(s1 == s2)
3101         dst &= lo2
3102     */
3103     tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
3104     tcg_gen_neg_tl(t1, t1);
3105     tcg_gen_or_tl(lo2, lo2, t1);
3106     tcg_gen_and_tl(dst, dst, lo2);
3107 
3108     tcg_temp_free(lo1);
3109     tcg_temp_free(lo2);
3110     tcg_temp_free(t1);
3111     tcg_temp_free(t2);
3112 }
3113 
3114 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3115 {
3116     TCGv tmp = tcg_temp_new();
3117 
3118     tcg_gen_add_tl(tmp, s1, s2);
3119     tcg_gen_andi_tl(dst, tmp, -8);
3120     if (left) {
3121         tcg_gen_neg_tl(tmp, tmp);
3122     }
3123     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3124 
3125     tcg_temp_free(tmp);
3126 }
3127 
3128 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3129 {
3130     TCGv t1, t2, shift;
3131 
3132     t1 = tcg_temp_new();
3133     t2 = tcg_temp_new();
3134     shift = tcg_temp_new();
3135 
3136     tcg_gen_andi_tl(shift, gsr, 7);
3137     tcg_gen_shli_tl(shift, shift, 3);
3138     tcg_gen_shl_tl(t1, s1, shift);
3139 
3140     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3141        shift of (up to 63) followed by a constant shift of 1.  */
3142     tcg_gen_xori_tl(shift, shift, 63);
3143     tcg_gen_shr_tl(t2, s2, shift);
3144     tcg_gen_shri_tl(t2, t2, 1);
3145 
3146     tcg_gen_or_tl(dst, t1, t2);
3147 
3148     tcg_temp_free(t1);
3149     tcg_temp_free(t2);
3150     tcg_temp_free(shift);
3151 }
3152 #endif
3153 
/* Bail out to the enclosing function's "illegal_insn" label when the
   integer unit of the CPU model being translated lacks FEATURE.  Only
   usable where that label is in scope (i.e. inside disas_sparc_insn).  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise, but jump to the "nfpu_insn" label when the FPU lacks
   FEATURE.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3160 
3161 /* before an instruction, dc->pc must be static */
3162 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3163 {
3164     unsigned int opc, rs1, rs2, rd;
3165     TCGv cpu_src1, cpu_src2;
3166     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3167     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3168     target_long simm;
3169 
3170     opc = GET_FIELD(insn, 0, 1);
3171     rd = GET_FIELD(insn, 2, 6);
3172 
3173     switch (opc) {
3174     case 0:                     /* branches/sethi */
3175         {
3176             unsigned int xop = GET_FIELD(insn, 7, 9);
3177             int32_t target;
3178             switch (xop) {
3179 #ifdef TARGET_SPARC64
3180             case 0x1:           /* V9 BPcc */
3181                 {
3182                     int cc;
3183 
3184                     target = GET_FIELD_SP(insn, 0, 18);
3185                     target = sign_extend(target, 19);
3186                     target <<= 2;
3187                     cc = GET_FIELD_SP(insn, 20, 21);
3188                     if (cc == 0)
3189                         do_branch(dc, target, insn, 0);
3190                     else if (cc == 2)
3191                         do_branch(dc, target, insn, 1);
3192                     else
3193                         goto illegal_insn;
3194                     goto jmp_insn;
3195                 }
3196             case 0x3:           /* V9 BPr */
3197                 {
3198                     target = GET_FIELD_SP(insn, 0, 13) |
3199                         (GET_FIELD_SP(insn, 20, 21) << 14);
3200                     target = sign_extend(target, 16);
3201                     target <<= 2;
3202                     cpu_src1 = get_src1(dc, insn);
3203                     do_branch_reg(dc, target, insn, cpu_src1);
3204                     goto jmp_insn;
3205                 }
3206             case 0x5:           /* V9 FBPcc */
3207                 {
3208                     int cc = GET_FIELD_SP(insn, 20, 21);
3209                     if (gen_trap_ifnofpu(dc)) {
3210                         goto jmp_insn;
3211                     }
3212                     target = GET_FIELD_SP(insn, 0, 18);
3213                     target = sign_extend(target, 19);
3214                     target <<= 2;
3215                     do_fbranch(dc, target, insn, cc);
3216                     goto jmp_insn;
3217                 }
3218 #else
3219             case 0x7:           /* CBN+x */
3220                 {
3221                     goto ncp_insn;
3222                 }
3223 #endif
3224             case 0x2:           /* BN+x */
3225                 {
3226                     target = GET_FIELD(insn, 10, 31);
3227                     target = sign_extend(target, 22);
3228                     target <<= 2;
3229                     do_branch(dc, target, insn, 0);
3230                     goto jmp_insn;
3231                 }
3232             case 0x6:           /* FBN+x */
3233                 {
3234                     if (gen_trap_ifnofpu(dc)) {
3235                         goto jmp_insn;
3236                     }
3237                     target = GET_FIELD(insn, 10, 31);
3238                     target = sign_extend(target, 22);
3239                     target <<= 2;
3240                     do_fbranch(dc, target, insn, 0);
3241                     goto jmp_insn;
3242                 }
3243             case 0x4:           /* SETHI */
3244                 /* Special-case %g0 because that's the canonical nop.  */
3245                 if (rd) {
3246                     uint32_t value = GET_FIELD(insn, 10, 31);
3247                     TCGv t = gen_dest_gpr(dc, rd);
3248                     tcg_gen_movi_tl(t, value << 10);
3249                     gen_store_gpr(dc, rd, t);
3250                 }
3251                 break;
3252             case 0x0:           /* UNIMPL */
3253             default:
3254                 goto illegal_insn;
3255             }
3256             break;
3257         }
3258         break;
3259     case 1:                     /*CALL*/
3260         {
3261             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3262             TCGv o7 = gen_dest_gpr(dc, 15);
3263 
3264             tcg_gen_movi_tl(o7, dc->pc);
3265             gen_store_gpr(dc, 15, o7);
3266             target += dc->pc;
3267             gen_mov_pc_npc(dc);
3268 #ifdef TARGET_SPARC64
3269             if (unlikely(AM_CHECK(dc))) {
3270                 target &= 0xffffffffULL;
3271             }
3272 #endif
3273             dc->npc = target;
3274         }
3275         goto jmp_insn;
3276     case 2:                     /* FPU & Logical Operations */
3277         {
3278             unsigned int xop = GET_FIELD(insn, 7, 12);
3279             TCGv cpu_dst = get_temp_tl(dc);
3280             TCGv cpu_tmp0;
3281 
3282             if (xop == 0x3a) {  /* generate trap */
3283                 int cond = GET_FIELD(insn, 3, 6);
3284                 TCGv_i32 trap;
3285                 TCGLabel *l1 = NULL;
3286                 int mask;
3287 
3288                 if (cond == 0) {
3289                     /* Trap never.  */
3290                     break;
3291                 }
3292 
3293                 save_state(dc);
3294 
3295                 if (cond != 8) {
3296                     /* Conditional trap.  */
3297                     DisasCompare cmp;
3298 #ifdef TARGET_SPARC64
3299                     /* V9 icc/xcc */
3300                     int cc = GET_FIELD_SP(insn, 11, 12);
3301                     if (cc == 0) {
3302                         gen_compare(&cmp, 0, cond, dc);
3303                     } else if (cc == 2) {
3304                         gen_compare(&cmp, 1, cond, dc);
3305                     } else {
3306                         goto illegal_insn;
3307                     }
3308 #else
3309                     gen_compare(&cmp, 0, cond, dc);
3310 #endif
3311                     l1 = gen_new_label();
3312                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3313                                       cmp.c1, cmp.c2, l1);
3314                     free_compare(&cmp);
3315                 }
3316 
3317                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3318                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3319 
3320                 /* Don't use the normal temporaries, as they may well have
3321                    gone out of scope with the branch above.  While we're
3322                    doing that we might as well pre-truncate to 32-bit.  */
3323                 trap = tcg_temp_new_i32();
3324 
3325                 rs1 = GET_FIELD_SP(insn, 14, 18);
3326                 if (IS_IMM) {
3327                     rs2 = GET_FIELD_SP(insn, 0, 7);
3328                     if (rs1 == 0) {
3329                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3330                         /* Signal that the trap value is fully constant.  */
3331                         mask = 0;
3332                     } else {
3333                         TCGv t1 = gen_load_gpr(dc, rs1);
3334                         tcg_gen_trunc_tl_i32(trap, t1);
3335                         tcg_gen_addi_i32(trap, trap, rs2);
3336                     }
3337                 } else {
3338                     TCGv t1, t2;
3339                     rs2 = GET_FIELD_SP(insn, 0, 4);
3340                     t1 = gen_load_gpr(dc, rs1);
3341                     t2 = gen_load_gpr(dc, rs2);
3342                     tcg_gen_add_tl(t1, t1, t2);
3343                     tcg_gen_trunc_tl_i32(trap, t1);
3344                 }
3345                 if (mask != 0) {
3346                     tcg_gen_andi_i32(trap, trap, mask);
3347                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3348                 }
3349 
3350                 gen_helper_raise_exception(cpu_env, trap);
3351                 tcg_temp_free_i32(trap);
3352 
3353                 if (cond == 8) {
3354                     /* An unconditional trap ends the TB.  */
3355                     dc->is_br = 1;
3356                     goto jmp_insn;
3357                 } else {
3358                     /* A conditional trap falls through to the next insn.  */
3359                     gen_set_label(l1);
3360                     break;
3361                 }
3362             } else if (xop == 0x28) {
3363                 rs1 = GET_FIELD(insn, 13, 17);
3364                 switch(rs1) {
3365                 case 0: /* rdy */
3366 #ifndef TARGET_SPARC64
3367                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3368                                        manual, rdy on the microSPARC
3369                                        II */
3370                 case 0x0f:          /* stbar in the SPARCv8 manual,
3371                                        rdy on the microSPARC II */
3372                 case 0x10 ... 0x1f: /* implementation-dependent in the
3373                                        SPARCv8 manual, rdy on the
3374                                        microSPARC II */
3375                     /* Read Asr17 */
3376                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3377                         TCGv t = gen_dest_gpr(dc, rd);
3378                         /* Read Asr17 for a Leon3 monoprocessor */
3379                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3380                         gen_store_gpr(dc, rd, t);
3381                         break;
3382                     }
3383 #endif
3384                     gen_store_gpr(dc, rd, cpu_y);
3385                     break;
3386 #ifdef TARGET_SPARC64
3387                 case 0x2: /* V9 rdccr */
3388                     update_psr(dc);
3389                     gen_helper_rdccr(cpu_dst, cpu_env);
3390                     gen_store_gpr(dc, rd, cpu_dst);
3391                     break;
3392                 case 0x3: /* V9 rdasi */
3393                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3394                     gen_store_gpr(dc, rd, cpu_dst);
3395                     break;
3396                 case 0x4: /* V9 rdtick */
3397                     {
3398                         TCGv_ptr r_tickptr;
3399                         TCGv_i32 r_const;
3400 
3401                         r_tickptr = tcg_temp_new_ptr();
3402                         r_const = tcg_const_i32(dc->mem_idx);
3403                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3404                                        offsetof(CPUSPARCState, tick));
3405                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3406                                                   r_const);
3407                         tcg_temp_free_ptr(r_tickptr);
3408                         tcg_temp_free_i32(r_const);
3409                         gen_store_gpr(dc, rd, cpu_dst);
3410                     }
3411                     break;
3412                 case 0x5: /* V9 rdpc */
3413                     {
3414                         TCGv t = gen_dest_gpr(dc, rd);
3415                         if (unlikely(AM_CHECK(dc))) {
3416                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3417                         } else {
3418                             tcg_gen_movi_tl(t, dc->pc);
3419                         }
3420                         gen_store_gpr(dc, rd, t);
3421                     }
3422                     break;
3423                 case 0x6: /* V9 rdfprs */
3424                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3425                     gen_store_gpr(dc, rd, cpu_dst);
3426                     break;
3427                 case 0xf: /* V9 membar */
3428                     break; /* no effect */
3429                 case 0x13: /* Graphics Status */
3430                     if (gen_trap_ifnofpu(dc)) {
3431                         goto jmp_insn;
3432                     }
3433                     gen_store_gpr(dc, rd, cpu_gsr);
3434                     break;
3435                 case 0x16: /* Softint */
3436                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3437                                      offsetof(CPUSPARCState, softint));
3438                     gen_store_gpr(dc, rd, cpu_dst);
3439                     break;
3440                 case 0x17: /* Tick compare */
3441                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3442                     break;
3443                 case 0x18: /* System tick */
3444                     {
3445                         TCGv_ptr r_tickptr;
3446                         TCGv_i32 r_const;
3447 
3448                         r_tickptr = tcg_temp_new_ptr();
3449                         r_const = tcg_const_i32(dc->mem_idx);
3450                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3451                                        offsetof(CPUSPARCState, stick));
3452                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3453                                                   r_const);
3454                         tcg_temp_free_ptr(r_tickptr);
3455                         tcg_temp_free_i32(r_const);
3456                         gen_store_gpr(dc, rd, cpu_dst);
3457                     }
3458                     break;
3459                 case 0x19: /* System tick compare */
3460                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3461                     break;
3462                 case 0x1a: /* UltraSPARC-T1 Strand status */
3463                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3464                      * this ASR as impl. dep
3465                      */
3466                     CHECK_IU_FEATURE(dc, HYPV);
3467                     {
3468                         TCGv t = gen_dest_gpr(dc, rd);
3469                         tcg_gen_movi_tl(t, 1UL);
3470                         gen_store_gpr(dc, rd, t);
3471                     }
3472                     break;
3473                 case 0x10: /* Performance Control */
3474                 case 0x11: /* Performance Instrumentation Counter */
3475                 case 0x12: /* Dispatch Control */
3476                 case 0x14: /* Softint set, WO */
3477                 case 0x15: /* Softint clear, WO */
3478 #endif
3479                 default:
3480                     goto illegal_insn;
3481                 }
3482 #if !defined(CONFIG_USER_ONLY)
3483             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3484 #ifndef TARGET_SPARC64
3485                 if (!supervisor(dc)) {
3486                     goto priv_insn;
3487                 }
3488                 update_psr(dc);
3489                 gen_helper_rdpsr(cpu_dst, cpu_env);
3490 #else
3491                 CHECK_IU_FEATURE(dc, HYPV);
3492                 if (!hypervisor(dc))
3493                     goto priv_insn;
3494                 rs1 = GET_FIELD(insn, 13, 17);
3495                 switch (rs1) {
3496                 case 0: // hpstate
3497                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3498                                    offsetof(CPUSPARCState, hpstate));
3499                     break;
3500                 case 1: // htstate
3501                     // gen_op_rdhtstate();
3502                     break;
3503                 case 3: // hintp
3504                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3505                     break;
3506                 case 5: // htba
3507                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3508                     break;
3509                 case 6: // hver
3510                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3511                     break;
3512                 case 31: // hstick_cmpr
3513                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3514                     break;
3515                 default:
3516                     goto illegal_insn;
3517                 }
3518 #endif
3519                 gen_store_gpr(dc, rd, cpu_dst);
3520                 break;
3521             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3522                 if (!supervisor(dc)) {
3523                     goto priv_insn;
3524                 }
3525                 cpu_tmp0 = get_temp_tl(dc);
3526 #ifdef TARGET_SPARC64
3527                 rs1 = GET_FIELD(insn, 13, 17);
3528                 switch (rs1) {
3529                 case 0: // tpc
3530                     {
3531                         TCGv_ptr r_tsptr;
3532 
3533                         r_tsptr = tcg_temp_new_ptr();
3534                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3535                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3536                                       offsetof(trap_state, tpc));
3537                         tcg_temp_free_ptr(r_tsptr);
3538                     }
3539                     break;
3540                 case 1: // tnpc
3541                     {
3542                         TCGv_ptr r_tsptr;
3543 
3544                         r_tsptr = tcg_temp_new_ptr();
3545                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3546                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3547                                       offsetof(trap_state, tnpc));
3548                         tcg_temp_free_ptr(r_tsptr);
3549                     }
3550                     break;
3551                 case 2: // tstate
3552                     {
3553                         TCGv_ptr r_tsptr;
3554 
3555                         r_tsptr = tcg_temp_new_ptr();
3556                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3557                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3558                                       offsetof(trap_state, tstate));
3559                         tcg_temp_free_ptr(r_tsptr);
3560                     }
3561                     break;
3562                 case 3: // tt
3563                     {
3564                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3565 
3566                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3567                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3568                                          offsetof(trap_state, tt));
3569                         tcg_temp_free_ptr(r_tsptr);
3570                     }
3571                     break;
3572                 case 4: // tick
3573                     {
3574                         TCGv_ptr r_tickptr;
3575                         TCGv_i32 r_const;
3576 
3577                         r_tickptr = tcg_temp_new_ptr();
3578                         r_const = tcg_const_i32(dc->mem_idx);
3579                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3580                                        offsetof(CPUSPARCState, tick));
3581                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3582                                                   r_tickptr, r_const);
3583                         tcg_temp_free_ptr(r_tickptr);
3584                         tcg_temp_free_i32(r_const);
3585                     }
3586                     break;
3587                 case 5: // tba
3588                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3589                     break;
3590                 case 6: // pstate
3591                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3592                                      offsetof(CPUSPARCState, pstate));
3593                     break;
3594                 case 7: // tl
3595                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3596                                      offsetof(CPUSPARCState, tl));
3597                     break;
3598                 case 8: // pil
3599                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3600                                      offsetof(CPUSPARCState, psrpil));
3601                     break;
3602                 case 9: // cwp
3603                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3604                     break;
3605                 case 10: // cansave
3606                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3607                                      offsetof(CPUSPARCState, cansave));
3608                     break;
3609                 case 11: // canrestore
3610                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3611                                      offsetof(CPUSPARCState, canrestore));
3612                     break;
3613                 case 12: // cleanwin
3614                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3615                                      offsetof(CPUSPARCState, cleanwin));
3616                     break;
3617                 case 13: // otherwin
3618                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3619                                      offsetof(CPUSPARCState, otherwin));
3620                     break;
3621                 case 14: // wstate
3622                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3623                                      offsetof(CPUSPARCState, wstate));
3624                     break;
3625                 case 16: // UA2005 gl
3626                     CHECK_IU_FEATURE(dc, GL);
3627                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3628                                      offsetof(CPUSPARCState, gl));
3629                     break;
3630                 case 26: // UA2005 strand status
3631                     CHECK_IU_FEATURE(dc, HYPV);
3632                     if (!hypervisor(dc))
3633                         goto priv_insn;
3634                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3635                     break;
3636                 case 31: // ver
3637                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3638                     break;
3639                 case 15: // fq
3640                 default:
3641                     goto illegal_insn;
3642                 }
3643 #else
3644                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3645 #endif
3646                 gen_store_gpr(dc, rd, cpu_tmp0);
3647                 break;
3648             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3649 #ifdef TARGET_SPARC64
3650                 gen_helper_flushw(cpu_env);
3651 #else
3652                 if (!supervisor(dc))
3653                     goto priv_insn;
3654                 gen_store_gpr(dc, rd, cpu_tbr);
3655 #endif
3656                 break;
3657 #endif
3658             } else if (xop == 0x34) {   /* FPU Operations */
3659                 if (gen_trap_ifnofpu(dc)) {
3660                     goto jmp_insn;
3661                 }
3662                 gen_op_clear_ieee_excp_and_FTT();
3663                 rs1 = GET_FIELD(insn, 13, 17);
3664                 rs2 = GET_FIELD(insn, 27, 31);
3665                 xop = GET_FIELD(insn, 18, 26);
3666 
3667                 switch (xop) {
3668                 case 0x1: /* fmovs */
3669                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3670                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3671                     break;
3672                 case 0x5: /* fnegs */
3673                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3674                     break;
3675                 case 0x9: /* fabss */
3676                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3677                     break;
3678                 case 0x29: /* fsqrts */
3679                     CHECK_FPU_FEATURE(dc, FSQRT);
3680                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3681                     break;
3682                 case 0x2a: /* fsqrtd */
3683                     CHECK_FPU_FEATURE(dc, FSQRT);
3684                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3685                     break;
3686                 case 0x2b: /* fsqrtq */
3687                     CHECK_FPU_FEATURE(dc, FLOAT128);
3688                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3689                     break;
3690                 case 0x41: /* fadds */
3691                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3692                     break;
3693                 case 0x42: /* faddd */
3694                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3695                     break;
3696                 case 0x43: /* faddq */
3697                     CHECK_FPU_FEATURE(dc, FLOAT128);
3698                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3699                     break;
3700                 case 0x45: /* fsubs */
3701                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3702                     break;
3703                 case 0x46: /* fsubd */
3704                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3705                     break;
3706                 case 0x47: /* fsubq */
3707                     CHECK_FPU_FEATURE(dc, FLOAT128);
3708                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3709                     break;
3710                 case 0x49: /* fmuls */
3711                     CHECK_FPU_FEATURE(dc, FMUL);
3712                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3713                     break;
3714                 case 0x4a: /* fmuld */
3715                     CHECK_FPU_FEATURE(dc, FMUL);
3716                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3717                     break;
3718                 case 0x4b: /* fmulq */
3719                     CHECK_FPU_FEATURE(dc, FLOAT128);
3720                     CHECK_FPU_FEATURE(dc, FMUL);
3721                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3722                     break;
3723                 case 0x4d: /* fdivs */
3724                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3725                     break;
3726                 case 0x4e: /* fdivd */
3727                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3728                     break;
3729                 case 0x4f: /* fdivq */
3730                     CHECK_FPU_FEATURE(dc, FLOAT128);
3731                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3732                     break;
3733                 case 0x69: /* fsmuld */
3734                     CHECK_FPU_FEATURE(dc, FSMULD);
3735                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3736                     break;
3737                 case 0x6e: /* fdmulq */
3738                     CHECK_FPU_FEATURE(dc, FLOAT128);
3739                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3740                     break;
                    /* Format-conversion ops between int/single/double/quad.
                       NOTE(review): the gen_fop_* / gen_ne_fop_* wrappers are
                       defined earlier in this file; they presumably differ in
                       whether FP-exception state is checked after the helper
                       call — confirm there.  Quad-precision forms are gated
                       on the FLOAT128 CPU feature. */
3741                 case 0xc4: /* fitos */
3742                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3743                     break;
3744                 case 0xc6: /* fdtos */
3745                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3746                     break;
3747                 case 0xc7: /* fqtos */
3748                     CHECK_FPU_FEATURE(dc, FLOAT128);
3749                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3750                     break;
3751                 case 0xc8: /* fitod */
3752                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3753                     break;
3754                 case 0xc9: /* fstod */
3755                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3756                     break;
3757                 case 0xcb: /* fqtod */
3758                     CHECK_FPU_FEATURE(dc, FLOAT128);
3759                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3760                     break;
3761                 case 0xcc: /* fitoq */
3762                     CHECK_FPU_FEATURE(dc, FLOAT128);
3763                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3764                     break;
3765                 case 0xcd: /* fstoq */
3766                     CHECK_FPU_FEATURE(dc, FLOAT128);
3767                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3768                     break;
3769                 case 0xce: /* fdtoq */
3770                     CHECK_FPU_FEATURE(dc, FLOAT128);
3771                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3772                     break;
3773                 case 0xd1: /* fstoi */
3774                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3775                     break;
3776                 case 0xd2: /* fdtoi */
3777                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3778                     break;
3779                 case 0xd3: /* fqtoi */
3780                     CHECK_FPU_FEATURE(dc, FLOAT128);
3781                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3782                     break;
3783 #ifdef TARGET_SPARC64
                /* SPARC-V9-only FPop1 encodings: direct double/quad moves,
                   negate/abs on double and quad, and the 64-bit integer
                   <-> FP conversions (fstox/fxtos and friends). */
3784                 case 0x2: /* V9 fmovd */
3785                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3786                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3787                     break;
3788                 case 0x3: /* V9 fmovq */
3789                     CHECK_FPU_FEATURE(dc, FLOAT128);
3790                     gen_move_Q(dc, rd, rs2);
3791                     break;
3792                 case 0x6: /* V9 fnegd */
3793                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3794                     break;
3795                 case 0x7: /* V9 fnegq */
3796                     CHECK_FPU_FEATURE(dc, FLOAT128);
3797                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3798                     break;
3799                 case 0xa: /* V9 fabsd */
3800                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3801                     break;
3802                 case 0xb: /* V9 fabsq */
3803                     CHECK_FPU_FEATURE(dc, FLOAT128);
3804                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3805                     break;
3806                 case 0x81: /* V9 fstox */
3807                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3808                     break;
3809                 case 0x82: /* V9 fdtox */
3810                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3811                     break;
3812                 case 0x83: /* V9 fqtox */
3813                     CHECK_FPU_FEATURE(dc, FLOAT128);
3814                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3815                     break;
3816                 case 0x84: /* V9 fxtos */
3817                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3818                     break;
3819                 case 0x88: /* V9 fxtod */
3820                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3821                     break;
3822                 case 0x8c: /* V9 fxtoq */
3823                     CHECK_FPU_FEATURE(dc, FLOAT128);
3824                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3825                     break;
3826 #endif
3827                 default:
3828                     goto illegal_insn;
3829                 }
3830             } else if (xop == 0x35) {   /* FPU Operations */
3831 #ifdef TARGET_SPARC64
3832                 int cond;
3833 #endif
                /* FPop2: trap if the FPU is disabled, clear the sticky IEEE
                   exception/FTT state, then decode rs1/rs2 and the 9-bit
                   opf field. */
3834                 if (gen_trap_ifnofpu(dc)) {
3835                     goto jmp_insn;
3836                 }
3837                 gen_op_clear_ieee_excp_and_FTT();
3838                 rs1 = GET_FIELD(insn, 13, 17);
3839                 rs2 = GET_FIELD(insn, 27, 31);
3840                 xop = GET_FIELD(insn, 18, 26);
3841 
3842 #ifdef TARGET_SPARC64
                /* FMOVR: conditionally move FP register rs2 to rd based on
                   the register condition (insn bits 10-12) applied to the
                   integer register rs1.  sz selects single/double/quad. */
3843 #define FMOVR(sz)                                                  \
3844                 do {                                               \
3845                     DisasCompare cmp;                              \
3846                     cond = GET_FIELD_SP(insn, 10, 12);             \
3847                     cpu_src1 = get_src1(dc, insn);                 \
3848                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3849                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3850                     free_compare(&cmp);                            \
3851                 } while (0)
3852 
3853                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3854                     FMOVR(s);
3855                     break;
3856                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3857                     FMOVR(d);
3858                     break;
3859                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3860                     CHECK_FPU_FEATURE(dc, FLOAT128);
3861                     FMOVR(q);
3862                     break;
3863                 }
3864 #undef FMOVR
3865 #endif
3866                 switch (xop) {
3867 #ifdef TARGET_SPARC64
                    /* FMOVcc tested against a floating-point condition
                       field: move FP register rs2 to rd when the condition
                       (insn bits 14-17) holds in %fcc<fcc>.  sz selects
                       single/double/quad. */
3868 #define FMOVCC(fcc, sz)                                                 \
3869                     do {                                                \
3870                         DisasCompare cmp;                               \
3871                         cond = GET_FIELD_SP(insn, 14, 17);              \
3872                         gen_fcompare(&cmp, fcc, cond);                  \
3873                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3874                         free_compare(&cmp);                             \
3875                     } while (0)
3876 
3877                     case 0x001: /* V9 fmovscc %fcc0 */
3878                         FMOVCC(0, s);
3879                         break;
3880                     case 0x002: /* V9 fmovdcc %fcc0 */
3881                         FMOVCC(0, d);
3882                         break;
3883                     case 0x003: /* V9 fmovqcc %fcc0 */
3884                         CHECK_FPU_FEATURE(dc, FLOAT128);
3885                         FMOVCC(0, q);
3886                         break;
3887                     case 0x041: /* V9 fmovscc %fcc1 */
3888                         FMOVCC(1, s);
3889                         break;
3890                     case 0x042: /* V9 fmovdcc %fcc1 */
3891                         FMOVCC(1, d);
3892                         break;
3893                     case 0x043: /* V9 fmovqcc %fcc1 */
3894                         CHECK_FPU_FEATURE(dc, FLOAT128);
3895                         FMOVCC(1, q);
3896                         break;
3897                     case 0x081: /* V9 fmovscc %fcc2 */
3898                         FMOVCC(2, s);
3899                         break;
3900                     case 0x082: /* V9 fmovdcc %fcc2 */
3901                         FMOVCC(2, d);
3902                         break;
3903                     case 0x083: /* V9 fmovqcc %fcc2 */
3904                         CHECK_FPU_FEATURE(dc, FLOAT128);
3905                         FMOVCC(2, q);
3906                         break;
3907                     case 0x0c1: /* V9 fmovscc %fcc3 */
3908                         FMOVCC(3, s);
3909                         break;
3910                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3911                         FMOVCC(3, d);
3912                         break;
3913                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3914                         CHECK_FPU_FEATURE(dc, FLOAT128);
3915                         FMOVCC(3, q);
3916                         break;
3917 #undef FMOVCC
                    /* Same pattern, but tested against the integer condition
                       codes: xcc=0 tests %icc, xcc=1 tests %xcc. */
3918 #define FMOVCC(xcc, sz)                                                 \
3919                     do {                                                \
3920                         DisasCompare cmp;                               \
3921                         cond = GET_FIELD_SP(insn, 14, 17);              \
3922                         gen_compare(&cmp, xcc, cond, dc);               \
3923                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3924                         free_compare(&cmp);                             \
3925                     } while (0)
3926 
3927                     case 0x101: /* V9 fmovscc %icc */
3928                         FMOVCC(0, s);
3929                         break;
3930                     case 0x102: /* V9 fmovdcc %icc */
3931                         FMOVCC(0, d);
3932                         break;
3933                     case 0x103: /* V9 fmovqcc %icc */
3934                         CHECK_FPU_FEATURE(dc, FLOAT128);
3935                         FMOVCC(0, q);
3936                         break;
3937                     case 0x181: /* V9 fmovscc %xcc */
3938                         FMOVCC(1, s);
3939                         break;
3940                     case 0x182: /* V9 fmovdcc %xcc */
3941                         FMOVCC(1, d);
3942                         break;
3943                     case 0x183: /* V9 fmovqcc %xcc */
3944                         CHECK_FPU_FEATURE(dc, FLOAT128);
3945                         FMOVCC(1, q);
3946                         break;
3947 #undef FMOVCC
3948 #endif
                    /* FP compares.  rd & 3 selects the destination %fcc
                       field on V9 (pre-V9 has only %fcc0, so the mask is
                       harmless there).  Quad compares go through the QT0/QT1
                       staging registers.  NOTE(review): the fcmpe* variants
                       are the "compare and exception if unordered" forms —
                       the distinction is handled in the helpers. */
3949                     case 0x51: /* fcmps, V9 %fcc */
3950                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3951                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3952                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3953                         break;
3954                     case 0x52: /* fcmpd, V9 %fcc */
3955                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3956                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3957                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3958                         break;
3959                     case 0x53: /* fcmpq, V9 %fcc */
3960                         CHECK_FPU_FEATURE(dc, FLOAT128);
3961                         gen_op_load_fpr_QT0(QFPREG(rs1));
3962                         gen_op_load_fpr_QT1(QFPREG(rs2));
3963                         gen_op_fcmpq(rd & 3);
3964                         break;
3965                     case 0x55: /* fcmpes, V9 %fcc */
3966                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3967                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3968                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3969                         break;
3970                     case 0x56: /* fcmped, V9 %fcc */
3971                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3972                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3973                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3974                         break;
3975                     case 0x57: /* fcmpeq, V9 %fcc */
3976                         CHECK_FPU_FEATURE(dc, FLOAT128);
3977                         gen_op_load_fpr_QT0(QFPREG(rs1));
3978                         gen_op_load_fpr_QT1(QFPREG(rs2));
3979                         gen_op_fcmpeq(rd & 3);
3980                         break;
3981                     default:
3982                         goto illegal_insn;
3983                 }
3984             } else if (xop == 0x2) {
                /* or — also the basis of the clr/mov synthetic instructions.
                   A %g0 operand (rs1 == 0 or rs2 == 0) lets us emit a plain
                   move instead of an actual OR. */
3985                 TCGv dst = gen_dest_gpr(dc, rd);
3986                 rs1 = GET_FIELD(insn, 13, 17);
3987                 if (rs1 == 0) {
3988                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3989                     if (IS_IMM) {       /* immediate */
3990                         simm = GET_FIELDs(insn, 19, 31);
3991                         tcg_gen_movi_tl(dst, simm);
3992                         gen_store_gpr(dc, rd, dst);
3993                     } else {            /* register */
3994                         rs2 = GET_FIELD(insn, 27, 31);
3995                         if (rs2 == 0) {
3996                             tcg_gen_movi_tl(dst, 0);
3997                             gen_store_gpr(dc, rd, dst);
3998                         } else {
3999                             cpu_src2 = gen_load_gpr(dc, rs2);
4000                             gen_store_gpr(dc, rd, cpu_src2);
4001                         }
4002                     }
4003                 } else {
4004                     cpu_src1 = get_src1(dc, insn);
4005                     if (IS_IMM) {       /* immediate */
4006                         simm = GET_FIELDs(insn, 19, 31);
4007                         tcg_gen_ori_tl(dst, cpu_src1, simm);
4008                         gen_store_gpr(dc, rd, dst);
4009                     } else {            /* register */
4010                         rs2 = GET_FIELD(insn, 27, 31);
4011                         if (rs2 == 0) {
4012                             /* mov shortcut:  or x, %g0, y -> mov x, y */
4013                             gen_store_gpr(dc, rd, cpu_src1);
4014                         } else {
4015                             cpu_src2 = gen_load_gpr(dc, rs2);
4016                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
4017                             gen_store_gpr(dc, rd, dst);
4018                         }
4019                     }
4020                 }
4021 #ifdef TARGET_SPARC64
                /* V9 shifts: insn bit 12 selects the 64-bit x-form
                   (sllx/srlx/srax, 6-bit shift count) vs the 32-bit form
                   (5-bit count).  The 32-bit srl/sra forms first zero- or
                   sign-extend the low 32 bits of the source. */
4022             } else if (xop == 0x25) { /* sll, V9 sllx */
4023                 cpu_src1 = get_src1(dc, insn);
4024                 if (IS_IMM) {   /* immediate */
4025                     simm = GET_FIELDs(insn, 20, 31);
4026                     if (insn & (1 << 12)) {
4027                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
4028                     } else {
4029                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
4030                     }
4031                 } else {                /* register */
4032                     rs2 = GET_FIELD(insn, 27, 31);
4033                     cpu_src2 = gen_load_gpr(dc, rs2);
4034                     cpu_tmp0 = get_temp_tl(dc);
4035                     if (insn & (1 << 12)) {
4036                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4037                     } else {
4038                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4039                     }
4040                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
4041                 }
4042                 gen_store_gpr(dc, rd, cpu_dst);
4043             } else if (xop == 0x26) { /* srl, V9 srlx */
4044                 cpu_src1 = get_src1(dc, insn);
4045                 if (IS_IMM) {   /* immediate */
4046                     simm = GET_FIELDs(insn, 20, 31);
4047                     if (insn & (1 << 12)) {
4048                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
4049                     } else {
4050                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4051                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
4052                     }
4053                 } else {                /* register */
4054                     rs2 = GET_FIELD(insn, 27, 31);
4055                     cpu_src2 = gen_load_gpr(dc, rs2);
4056                     cpu_tmp0 = get_temp_tl(dc);
4057                     if (insn & (1 << 12)) {
4058                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4059                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
4060                     } else {
4061                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4062                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4063                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
4064                     }
4065                 }
4066                 gen_store_gpr(dc, rd, cpu_dst);
4067             } else if (xop == 0x27) { /* sra, V9 srax */
4068                 cpu_src1 = get_src1(dc, insn);
4069                 if (IS_IMM) {   /* immediate */
4070                     simm = GET_FIELDs(insn, 20, 31);
4071                     if (insn & (1 << 12)) {
4072                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
4073                     } else {
4074                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4075                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
4076                     }
4077                 } else {                /* register */
4078                     rs2 = GET_FIELD(insn, 27, 31);
4079                     cpu_src2 = gen_load_gpr(dc, rs2);
4080                     cpu_tmp0 = get_temp_tl(dc);
4081                     if (insn & (1 << 12)) {
4082                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4083                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
4084                     } else {
4085                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4086                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4087                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
4088                     }
4089                 }
4090                 gen_store_gpr(dc, rd, cpu_dst);
4091 #endif
4092             } else if (xop < 0x36) {
4093                 if (xop < 0x20) {
4094                     cpu_src1 = get_src1(dc, insn);
4095                     cpu_src2 = get_src2(dc, insn);
                    /* Two-source ALU group.  Bit 4 of xop (xop & 0x10)
                       selects the cc-setting variant; condition codes are
                       tracked lazily through cpu_cc_* / cpu_cc_op and the
                       mirrored dc->cc_op. */
4096                     switch (xop & ~0x10) {
4097                     case 0x0: /* add */
4098                         if (xop & 0x10) {
4099                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4100                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4101                             dc->cc_op = CC_OP_ADD;
4102                         } else {
4103                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4104                         }
4105                         break;
4106                     case 0x1: /* and */
4107                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
4108                         if (xop & 0x10) {
4109                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4110                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4111                             dc->cc_op = CC_OP_LOGIC;
4112                         }
4113                         break;
4114                     case 0x2: /* or */
4115                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4116                         if (xop & 0x10) {
4117                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4118                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4119                             dc->cc_op = CC_OP_LOGIC;
4120                         }
4121                         break;
4122                     case 0x3: /* xor */
4123                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4124                         if (xop & 0x10) {
4125                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4126                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4127                             dc->cc_op = CC_OP_LOGIC;
4128                         }
4129                         break;
4130                     case 0x4: /* sub */
4131                         if (xop & 0x10) {
4132                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4133                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4134                             dc->cc_op = CC_OP_SUB;
4135                         } else {
4136                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4137                         }
4138                         break;
4139                     case 0x5: /* andn */
4140                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4141                         if (xop & 0x10) {
4142                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4143                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4144                             dc->cc_op = CC_OP_LOGIC;
4145                         }
4146                         break;
4147                     case 0x6: /* orn */
4148                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4149                         if (xop & 0x10) {
4150                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4151                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4152                             dc->cc_op = CC_OP_LOGIC;
4153                         }
4154                         break;
4155                     case 0x7: /* xorn */
4156                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4157                         if (xop & 0x10) {
4158                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4159                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4160                             dc->cc_op = CC_OP_LOGIC;
4161                         }
4162                         break;
4163                     case 0x8: /* addx, V9 addc */
4164                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4165                                         (xop & 0x10));
4166                         break;
4167 #ifdef TARGET_SPARC64
4168                     case 0x9: /* V9 mulx */
4169                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4170                         break;
4171 #endif
4172                     case 0xa: /* umul */
4173                         CHECK_IU_FEATURE(dc, MUL);
4174                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4175                         if (xop & 0x10) {
4176                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4177                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4178                             dc->cc_op = CC_OP_LOGIC;
4179                         }
4180                         break;
4181                     case 0xb: /* smul */
4182                         CHECK_IU_FEATURE(dc, MUL);
4183                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4184                         if (xop & 0x10) {
4185                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4186                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4187                             dc->cc_op = CC_OP_LOGIC;
4188                         }
4189                         break;
4190                     case 0xc: /* subx, V9 subc */
4191                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4192                                         (xop & 0x10));
4193                         break;
4194 #ifdef TARGET_SPARC64
4195                     case 0xd: /* V9 udivx */
4196                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4197                         break;
4198 #endif
4199                     case 0xe: /* udiv */
4200                         CHECK_IU_FEATURE(dc, DIV);
4201                         if (xop & 0x10) {
                            /* NOTE(review): unlike the other cc cases there
                               is no tcg move into cpu_cc_op here — the *_cc
                               division helpers presumably update env->cc_op
                               themselves; confirm against the helper code. */
4202                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4203                                                cpu_src2);
4204                             dc->cc_op = CC_OP_DIV;
4205                         } else {
4206                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4207                                             cpu_src2);
4208                         }
4209                         break;
4210                     case 0xf: /* sdiv */
4211                         CHECK_IU_FEATURE(dc, DIV);
4212                         if (xop & 0x10) {
4213                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4214                                                cpu_src2);
4215                             dc->cc_op = CC_OP_DIV;
4216                         } else {
4217                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4218                                             cpu_src2);
4219                         }
4220                         break;
4221                     default:
4222                         goto illegal_insn;
4223                     }
4224                     gen_store_gpr(dc, rd, cpu_dst);
4225                 } else {
4226                     cpu_src1 = get_src1(dc, insn);
4227                     cpu_src2 = get_src2(dc, insn);
4228                     switch (xop) {
                    /* Tagged add/sub.  The plain cc forms record flags
                       lazily; the ...cctv forms go through helpers
                       (NOTE(review): those helpers presumably raise the
                       tag-overflow trap — confirm in the helper source). */
4229                     case 0x20: /* taddcc */
4230                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4231                         gen_store_gpr(dc, rd, cpu_dst);
4232                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4233                         dc->cc_op = CC_OP_TADD;
4234                         break;
4235                     case 0x21: /* tsubcc */
4236                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4237                         gen_store_gpr(dc, rd, cpu_dst);
4238                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4239                         dc->cc_op = CC_OP_TSUB;
4240                         break;
4241                     case 0x22: /* taddcctv */
4242                         gen_helper_taddcctv(cpu_dst, cpu_env,
4243                                             cpu_src1, cpu_src2);
4244                         gen_store_gpr(dc, rd, cpu_dst);
4245                         dc->cc_op = CC_OP_TADDTV;
4246                         break;
4247                     case 0x23: /* tsubcctv */
4248                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4249                                             cpu_src1, cpu_src2);
4250                         gen_store_gpr(dc, rd, cpu_dst);
4251                         dc->cc_op = CC_OP_TSUBTV;
4252                         break;
4253                     case 0x24: /* mulscc */
                        /* Bring the lazily-evaluated PSR up to date first;
                           the mulscc step depends on the live flag state. */
4254                         update_psr(dc);
4255                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4256                         gen_store_gpr(dc, rd, cpu_dst);
4257                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4258                         dc->cc_op = CC_OP_ADD;
4259                         break;
4260 #ifndef TARGET_SPARC64
                    /* Pre-V9 32-bit shifts; on TARGET_SPARC64 these opcodes
                       are decoded earlier, before this switch. */
4261                     case 0x25:  /* sll */
4262                         if (IS_IMM) { /* immediate */
4263                             simm = GET_FIELDs(insn, 20, 31);
4264                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4265                         } else { /* register */
4266                             cpu_tmp0 = get_temp_tl(dc);
4267                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4268                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4269                         }
4270                         gen_store_gpr(dc, rd, cpu_dst);
4271                         break;
4272                     case 0x26:  /* srl */
4273                         if (IS_IMM) { /* immediate */
4274                             simm = GET_FIELDs(insn, 20, 31);
4275                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4276                         } else { /* register */
4277                             cpu_tmp0 = get_temp_tl(dc);
4278                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4279                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4280                         }
4281                         gen_store_gpr(dc, rd, cpu_dst);
4282                         break;
4283                     case 0x27:  /* sra */
4284                         if (IS_IMM) { /* immediate */
4285                             simm = GET_FIELDs(insn, 20, 31);
4286                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4287                         } else { /* register */
4288                             cpu_tmp0 = get_temp_tl(dc);
4289                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4290                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4291                         }
4292                         gen_store_gpr(dc, rd, cpu_dst);
4293                         break;
4294 #endif
                    /* wr: write %y or an ancillary state register; rd
                       selects the destination.  Per the architecture the
                       value written is rs1 ^ rs2 (rs2 may be an immediate),
                       hence the xor before every store. */
4295                     case 0x30:
4296                         {
4297                             cpu_tmp0 = get_temp_tl(dc);
4298                             switch(rd) {
4299                             case 0: /* wry */
4300                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4301                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4302                                 break;
4303 #ifndef TARGET_SPARC64
4304                             case 0x01 ... 0x0f: /* undefined in the
4305                                                    SPARCv8 manual, nop
4306                                                    on the microSPARC
4307                                                    II */
4308                             case 0x10 ... 0x1f: /* implementation-dependent
4309                                                    in the SPARCv8
4310                                                    manual, nop on the
4311                                                    microSPARC II */
4312                                 if ((rd == 0x13) && (dc->def->features &
4313                                                      CPU_FEATURE_POWERDOWN)) {
4314                                     /* LEON3 power-down */
4315                                     save_state(dc);
4316                                     gen_helper_power_down(cpu_env);
4317                                 }
4318                                 break;
4319 #else
4320                             case 0x2: /* V9 wrccr */
4321                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4322                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
4323                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4324                                 dc->cc_op = CC_OP_FLAGS;
4325                                 break;
4326                             case 0x3: /* V9 wrasi */
4327                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4328                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4329                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4330                                                 offsetof(CPUSPARCState, asi));
4331                                 /* End TB to notice changed ASI.  */
4332                                 save_state(dc);
4333                                 gen_op_next_insn();
4334                                 tcg_gen_exit_tb(0);
4335                                 dc->is_br = 1;
4336                                 break;
4337                             case 0x6: /* V9 wrfprs */
4338                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4339                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                /* FPRS changed: reset the cached dirty flag
                                   and end the TB. */
4340                                 dc->fprs_dirty = 0;
4341                                 save_state(dc);
4342                                 gen_op_next_insn();
4343                                 tcg_gen_exit_tb(0);
4344                                 dc->is_br = 1;
4345                                 break;
4346                             case 0xf: /* V9 sir, nop if user */
4347 #if !defined(CONFIG_USER_ONLY)
4348                                 if (supervisor(dc)) {
4349                                     ; // XXX
4350                                 }
4351 #endif
4352                                 break;
4353                             case 0x13: /* Graphics Status */
4354                                 if (gen_trap_ifnofpu(dc)) {
4355                                     goto jmp_insn;
4356                                 }
4357                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4358                                 break;
4359                             case 0x14: /* Softint set */
4360                                 if (!supervisor(dc))
4361                                     goto illegal_insn;
4362                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4363                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4364                                 break;
4365                             case 0x15: /* Softint clear */
4366                                 if (!supervisor(dc))
4367                                     goto illegal_insn;
4368                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4369                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
4370                                 break;
4371                             case 0x16: /* Softint write */
4372                                 if (!supervisor(dc))
4373                                     goto illegal_insn;
4374                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4375                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
4376                                 break;
4377                             case 0x17: /* Tick compare */
4378 #if !defined(CONFIG_USER_ONLY)
4379                                 if (!supervisor(dc))
4380                                     goto illegal_insn;
4381 #endif
4382                                 {
4383                                     TCGv_ptr r_tickptr;
4384 
4385                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4386                                                    cpu_src2);
4387                                     r_tickptr = tcg_temp_new_ptr();
4388                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4389                                                    offsetof(CPUSPARCState, tick));
4390                                     gen_helper_tick_set_limit(r_tickptr,
4391                                                               cpu_tick_cmpr);
4392                                     tcg_temp_free_ptr(r_tickptr);
4393                                 }
4394                                 break;
4395                             case 0x18: /* System tick */
4396 #if !defined(CONFIG_USER_ONLY)
4397                                 if (!supervisor(dc))
4398                                     goto illegal_insn;
4399 #endif
4400                                 {
4401                                     TCGv_ptr r_tickptr;
4402 
4403                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4404                                                    cpu_src2);
4405                                     r_tickptr = tcg_temp_new_ptr();
4406                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4407                                                    offsetof(CPUSPARCState, stick));
4408                                     gen_helper_tick_set_count(r_tickptr,
4409                                                               cpu_tmp0);
4410                                     tcg_temp_free_ptr(r_tickptr);
4411                                 }
4412                                 break;
4413                             case 0x19: /* System tick compare */
4414 #if !defined(CONFIG_USER_ONLY)
4415                                 if (!supervisor(dc))
4416                                     goto illegal_insn;
4417 #endif
4418                                 {
4419                                     TCGv_ptr r_tickptr;
4420 
4421                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4422                                                    cpu_src2);
4423                                     r_tickptr = tcg_temp_new_ptr();
4424                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4425                                                    offsetof(CPUSPARCState, stick));
4426                                     gen_helper_tick_set_limit(r_tickptr,
4427                                                               cpu_stick_cmpr);
4428                                     tcg_temp_free_ptr(r_tickptr);
4429                                 }
4430                                 break;
4431 
4432                             case 0x10: /* Performance Control */
4433                             case 0x11: /* Performance Instrumentation
4434                                           Counter */
4435                             case 0x12: /* Dispatch Control */
4436 #endif
4437                             default:
4438                                 goto illegal_insn;
4439                             }
4440                         }
4441                         break;
4442 #if !defined(CONFIG_USER_ONLY)
                         /* Opcode 0x31: pre-V9 this is WRPSR; on V9 the same
                            encoding is SAVED/RESTORED (window-management),
                            selected by the rd field.  Privileged either way. */
4443                     case 0x31: /* wrpsr, V9 saved, restored */
4444                         {
4445                             if (!supervisor(dc))
4446                                 goto priv_insn;
4447 #ifdef TARGET_SPARC64
4448                             switch (rd) {
4449                             case 0:
4450                                 gen_helper_saved(cpu_env);
4451                                 break;
4452                             case 1:
4453                                 gen_helper_restored(cpu_env);
4454                                 break;
                                 /* UA2005 window ops below are unimplemented
                                    and intentionally fall through to
                                    illegal_insn. */
4455                             case 2: /* UA2005 allclean */
4456                             case 3: /* UA2005 otherw */
4457                             case 4: /* UA2005 normalw */
4458                             case 5: /* UA2005 invalw */
4459                                 // XXX
4460                             default:
4461                                 goto illegal_insn;
4462                             }
4463 #else
                             /* WRPSR writes rs1 ^ operand2 (SPARC WR
                                semantics).  The helper rewrites the condition
                                codes, so force cc_op back to CC_OP_FLAGS and
                                end the TB: later code must be retranslated
                                under the new PSR (e.g. CWP/PIL changes). */
4464                             cpu_tmp0 = get_temp_tl(dc);
4465                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4466                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
4467                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4468                             dc->cc_op = CC_OP_FLAGS;
4469                             save_state(dc);
4470                             gen_op_next_insn();
4471                             tcg_gen_exit_tb(0);
4472                             dc->is_br = 1;
4473 #endif
4474                         }
4475                         break;
                         /* Opcode 0x32: pre-V9 WRWIM; on V9 WRPR, where rd
                            selects which privileged register receives
                            rs1 ^ operand2 (computed once into cpu_tmp0). */
4476                     case 0x32: /* wrwim, V9 wrpr */
4477                         {
4478                             if (!supervisor(dc))
4479                                 goto priv_insn;
4480                             cpu_tmp0 = get_temp_tl(dc);
4481                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4482 #ifdef TARGET_SPARC64
4483                             switch (rd) {
                             /* Cases 0-3 store into the current trap_state
                                frame (selected by TL) via
                                gen_load_trap_state_at_tl. */
4484                             case 0: // tpc
4485                                 {
4486                                     TCGv_ptr r_tsptr;
4487 
4488                                     r_tsptr = tcg_temp_new_ptr();
4489                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4490                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4491                                                   offsetof(trap_state, tpc));
4492                                     tcg_temp_free_ptr(r_tsptr);
4493                                 }
4494                                 break;
4495                             case 1: // tnpc
4496                                 {
4497                                     TCGv_ptr r_tsptr;
4498 
4499                                     r_tsptr = tcg_temp_new_ptr();
4500                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4501                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4502                                                   offsetof(trap_state, tnpc));
4503                                     tcg_temp_free_ptr(r_tsptr);
4504                                 }
4505                                 break;
4506                             case 2: // tstate
4507                                 {
4508                                     TCGv_ptr r_tsptr;
4509 
4510                                     r_tsptr = tcg_temp_new_ptr();
4511                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4512                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4513                                                   offsetof(trap_state,
4514                                                            tstate));
4515                                     tcg_temp_free_ptr(r_tsptr);
4516                                 }
4517                                 break;
4518                             case 3: // tt
4519                                 {
4520                                     TCGv_ptr r_tsptr;
4521 
4522                                     r_tsptr = tcg_temp_new_ptr();
4523                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
                                         /* tt is a 32-bit field: truncating
                                            store. */
4524                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4525                                                     offsetof(trap_state, tt));
4526                                     tcg_temp_free_ptr(r_tsptr);
4527                                 }
4528                                 break;
4529                             case 4: // tick
4530                                 {
4531                                     TCGv_ptr r_tickptr;
4532 
                                         /* Writes the TICK counter value via
                                            the timer object in env->tick. */
4533                                     r_tickptr = tcg_temp_new_ptr();
4534                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4535                                                    offsetof(CPUSPARCState, tick));
4536                                     gen_helper_tick_set_count(r_tickptr,
4537                                                               cpu_tmp0);
4538                                     tcg_temp_free_ptr(r_tickptr);
4539                                 }
4540                                 break;
4541                             case 5: // tba
4542                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4543                                 break;
4544                             case 6: // pstate
                                 /* PSTATE affects how following insns must be
                                    translated (and the helper may trap), so
                                    save state first and make npc dynamic. */
4545                                 save_state(dc);
4546                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4547                                 dc->npc = DYNAMIC_PC;
4548                                 break;
4549                             case 7: // tl
                                 /* Changing TL switches the active trap_state
                                    frame; same save-state / dynamic-npc
                                    treatment as PSTATE. */
4550                                 save_state(dc);
4551                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4552                                                offsetof(CPUSPARCState, tl));
4553                                 dc->npc = DYNAMIC_PC;
4554                                 break;
4555                             case 8: // pil
4556                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4557                                 break;
4558                             case 9: // cwp
4559                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4560                                 break;
                             /* Cases 10-14: plain 32-bit stores into the
                                register-window bookkeeping fields of env. */
4561                             case 10: // cansave
4562                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4563                                                 offsetof(CPUSPARCState,
4564                                                          cansave));
4565                                 break;
4566                             case 11: // canrestore
4567                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4568                                                 offsetof(CPUSPARCState,
4569                                                          canrestore));
4570                                 break;
4571                             case 12: // cleanwin
4572                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4573                                                 offsetof(CPUSPARCState,
4574                                                          cleanwin));
4575                                 break;
4576                             case 13: // otherwin
4577                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4578                                                 offsetof(CPUSPARCState,
4579                                                          otherwin));
4580                                 break;
4581                             case 14: // wstate
4582                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4583                                                 offsetof(CPUSPARCState,
4584                                                          wstate));
4585                                 break;
4586                             case 16: // UA2005 gl
4587                                 CHECK_IU_FEATURE(dc, GL);
4588                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4589                                 break;
4590                             case 26: // UA2005 strand status
                                 /* Requires both the HYPV feature and actual
                                    hypervisor privilege. */
4591                                 CHECK_IU_FEATURE(dc, HYPV);
4592                                 if (!hypervisor(dc))
4593                                     goto priv_insn;
4594                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4595                                 break;
4596                             default:
4597                                 goto illegal_insn;
4598                             }
4599 #else
                             /* Pre-V9 WRWIM: mask the new WIM down to one bit
                                per implemented register window. */
4600                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4601                             if (dc->def->nwindows != 32) {
4602                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4603                                                 (1 << dc->def->nwindows) - 1);
4604                             }
4605 #endif
4606                         }
4607                         break;
                         /* Opcode 0x33: pre-V9 WRTBR (supervisor); on UA2005
                            it is WRHPR (hyperprivileged), with rd selecting
                            the hyperprivileged register. */
4608                     case 0x33: /* wrtbr, UA2005 wrhpr */
4609                         {
4610 #ifndef TARGET_SPARC64
4611                             if (!supervisor(dc))
4612                                 goto priv_insn;
                             /* WR semantics: TBR := rs1 ^ operand2. */
4613                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4614 #else
4615                             CHECK_IU_FEATURE(dc, HYPV);
4616                             if (!hypervisor(dc))
4617                                 goto priv_insn;
4618                             cpu_tmp0 = get_temp_tl(dc);
4619                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4620                             switch (rd) {
4621                             case 0: // hpstate
4622                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4623                                                offsetof(CPUSPARCState,
4624                                                         hpstate));
                                 /* HPSTATE affects translation of subsequent
                                    insns: end the TB here. */
4625                                 save_state(dc);
4626                                 gen_op_next_insn();
4627                                 tcg_gen_exit_tb(0);
4628                                 dc->is_br = 1;
4629                                 break;
4630                             case 1: // htstate
                                 /* HTSTATE write not implemented; silently
                                    ignored. */
4631                                 // XXX gen_op_wrhtstate();
4632                                 break;
4633                             case 3: // hintp
4634                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4635                                 break;
4636                             case 5: // htba
4637                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4638                                 break;
4639                             case 31: // hstick_cmpr
4640                                 {
4641                                     TCGv_ptr r_tickptr;
4642 
                                         /* Cache HSTICK_CMPR, then program the
                                            env->hstick timer's limit. */
4643                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4644                                     r_tickptr = tcg_temp_new_ptr();
4645                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4646                                                    offsetof(CPUSPARCState, hstick));
4647                                     gen_helper_tick_set_limit(r_tickptr,
4648                                                               cpu_hstick_cmpr);
4649                                     tcg_temp_free_ptr(r_tickptr);
4650                                 }
4651                                 break;
                             /* HVER is read-only: writing it is illegal. */
4652                             case 6: // hver readonly
4653                             default:
4654                                 goto illegal_insn;
4655                             }
4656 #endif
4657                         }
4658                         break;
4659 #endif
4660 #ifdef TARGET_SPARC64
                         /* V9 MOVcc: conditionally move reg-or-imm into rd.
                            Bit 18 selects integer ccs (cc field 0 = icc,
                            2 = xcc) versus floating-point ccs (cc = fcc#). */
4661                     case 0x2c: /* V9 movcc */
4662                         {
4663                             int cc = GET_FIELD_SP(insn, 11, 12);
4664                             int cond = GET_FIELD_SP(insn, 14, 17);
4665                             DisasCompare cmp;
4666                             TCGv dst;
4667 
4668                             if (insn & (1 << 18)) {
4669                                 if (cc == 0) {
4670                                     gen_compare(&cmp, 0, cond, dc);
4671                                 } else if (cc == 2) {
4672                                     gen_compare(&cmp, 1, cond, dc);
4673                                 } else {
4674                                     goto illegal_insn;
4675                                 }
4676                             } else {
4677                                 gen_fcompare(&cmp, cc, cond);
4678                             }
4679 
4680                             /* The get_src2 above loaded the normal 13-bit
4681                                immediate field, not the 11-bit field we have
4682                                in movcc.  But it did handle the reg case.  */
4683                             if (IS_IMM) {
4684                                 simm = GET_FIELD_SPs(insn, 0, 10);
4685                                 tcg_gen_movi_tl(cpu_src2, simm);
4686                             }
4687 
                             /* movcond writes src2 when the condition holds,
                                otherwise keeps rd's current value (dst). */
4688                             dst = gen_load_gpr(dc, rd);
4689                             tcg_gen_movcond_tl(cmp.cond, dst,
4690                                                cmp.c1, cmp.c2,
4691                                                cpu_src2, dst);
4692                             free_compare(&cmp);
4693                             gen_store_gpr(dc, rd, dst);
4694                             break;
4695                         }
4696                     case 0x2d: /* V9 sdivx */
4697                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4698                         gen_store_gpr(dc, rd, cpu_dst);
4699                         break;
                         /* POPC counts set bits of the rs2 operand only
                            (rs1 is architecturally required to be %g0). */
4700                     case 0x2e: /* V9 popc */
4701                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4702                         gen_store_gpr(dc, rd, cpu_dst);
4703                         break;
                         /* V9 MOVr: like movcc but the condition is a
                            register test on rs1 (zero/nonzero/sign). */
4704                     case 0x2f: /* V9 movr */
4705                         {
4706                             int cond = GET_FIELD_SP(insn, 10, 12);
4707                             DisasCompare cmp;
4708                             TCGv dst;
4709 
4710                             gen_compare_reg(&cmp, cond, cpu_src1);
4711 
4712                             /* The get_src2 above loaded the normal 13-bit
4713                                immediate field, not the 10-bit field we have
4714                                in movr.  But it did handle the reg case.  */
4715                             if (IS_IMM) {
4716                                 simm = GET_FIELD_SPs(insn, 0, 9);
4717                                 tcg_gen_movi_tl(cpu_src2, simm);
4718                             }
4719 
4720                             dst = gen_load_gpr(dc, rd);
4721                             tcg_gen_movcond_tl(cmp.cond, dst,
4722                                                cmp.c1, cmp.c2,
4723                                                cpu_src2, dst);
4724                             free_compare(&cmp);
4725                             gen_store_gpr(dc, rd, dst);
4726                             break;
4727                         }
4728 #endif
4729                     default:
4730                         goto illegal_insn;
4731                     }
4732                 }
             /* xop 0x36: VIS / IMPDEP1 opcode space (sub-opcode in opf).
                All VIS ops trap if the FPU is disabled, hence the
                gen_trap_ifnofpu gate before dispatch.  rs1/rs2 are
                re-extracted here from the raw insn fields. */
4733             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4734 #ifdef TARGET_SPARC64
4735                 int opf = GET_FIELD_SP(insn, 5, 13);
4736                 rs1 = GET_FIELD(insn, 13, 17);
4737                 rs2 = GET_FIELD(insn, 27, 31);
4738                 if (gen_trap_ifnofpu(dc)) {
4739                     goto jmp_insn;
4740                 }
4741 
4742                 switch (opf) {
                 /* edge8/16/32 family: gen_edge's trailing args encode
                    (element width in bits, whether the 'cc' variant sets
                    condition codes, whether it is the little-endian 'l'
                    variant) — matching the case comments below. */
4743                 case 0x000: /* VIS I edge8cc */
4744                     CHECK_FPU_FEATURE(dc, VIS1);
4745                     cpu_src1 = gen_load_gpr(dc, rs1);
4746                     cpu_src2 = gen_load_gpr(dc, rs2);
4747                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4748                     gen_store_gpr(dc, rd, cpu_dst);
4749                     break;
4750                 case 0x001: /* VIS II edge8n */
4751                     CHECK_FPU_FEATURE(dc, VIS2);
4752                     cpu_src1 = gen_load_gpr(dc, rs1);
4753                     cpu_src2 = gen_load_gpr(dc, rs2);
4754                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4755                     gen_store_gpr(dc, rd, cpu_dst);
4756                     break;
4757                 case 0x002: /* VIS I edge8lcc */
4758                     CHECK_FPU_FEATURE(dc, VIS1);
4759                     cpu_src1 = gen_load_gpr(dc, rs1);
4760                     cpu_src2 = gen_load_gpr(dc, rs2);
4761                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4762                     gen_store_gpr(dc, rd, cpu_dst);
4763                     break;
4764                 case 0x003: /* VIS II edge8ln */
4765                     CHECK_FPU_FEATURE(dc, VIS2);
4766                     cpu_src1 = gen_load_gpr(dc, rs1);
4767                     cpu_src2 = gen_load_gpr(dc, rs2);
4768                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4769                     gen_store_gpr(dc, rd, cpu_dst);
4770                     break;
4771                 case 0x004: /* VIS I edge16cc */
4772                     CHECK_FPU_FEATURE(dc, VIS1);
4773                     cpu_src1 = gen_load_gpr(dc, rs1);
4774                     cpu_src2 = gen_load_gpr(dc, rs2);
4775                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4776                     gen_store_gpr(dc, rd, cpu_dst);
4777                     break;
4778                 case 0x005: /* VIS II edge16n */
4779                     CHECK_FPU_FEATURE(dc, VIS2);
4780                     cpu_src1 = gen_load_gpr(dc, rs1);
4781                     cpu_src2 = gen_load_gpr(dc, rs2);
4782                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4783                     gen_store_gpr(dc, rd, cpu_dst);
4784                     break;
4785                 case 0x006: /* VIS I edge16lcc */
4786                     CHECK_FPU_FEATURE(dc, VIS1);
4787                     cpu_src1 = gen_load_gpr(dc, rs1);
4788                     cpu_src2 = gen_load_gpr(dc, rs2);
4789                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4790                     gen_store_gpr(dc, rd, cpu_dst);
4791                     break;
4792                 case 0x007: /* VIS II edge16ln */
4793                     CHECK_FPU_FEATURE(dc, VIS2);
4794                     cpu_src1 = gen_load_gpr(dc, rs1);
4795                     cpu_src2 = gen_load_gpr(dc, rs2);
4796                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4797                     gen_store_gpr(dc, rd, cpu_dst);
4798                     break;
4799                 case 0x008: /* VIS I edge32cc */
4800                     CHECK_FPU_FEATURE(dc, VIS1);
4801                     cpu_src1 = gen_load_gpr(dc, rs1);
4802                     cpu_src2 = gen_load_gpr(dc, rs2);
4803                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4804                     gen_store_gpr(dc, rd, cpu_dst);
4805                     break;
4806                 case 0x009: /* VIS II edge32n */
4807                     CHECK_FPU_FEATURE(dc, VIS2);
4808                     cpu_src1 = gen_load_gpr(dc, rs1);
4809                     cpu_src2 = gen_load_gpr(dc, rs2);
4810                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4811                     gen_store_gpr(dc, rd, cpu_dst);
4812                     break;
4813                 case 0x00a: /* VIS I edge32lcc */
4814                     CHECK_FPU_FEATURE(dc, VIS1);
4815                     cpu_src1 = gen_load_gpr(dc, rs1);
4816                     cpu_src2 = gen_load_gpr(dc, rs2);
4817                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4818                     gen_store_gpr(dc, rd, cpu_dst);
4819                     break;
4820                 case 0x00b: /* VIS II edge32ln */
4821                     CHECK_FPU_FEATURE(dc, VIS2);
4822                     cpu_src1 = gen_load_gpr(dc, rs1);
4823                     cpu_src2 = gen_load_gpr(dc, rs2);
4824                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4825                     gen_store_gpr(dc, rd, cpu_dst);
4826                     break;
                 /* array16/array32 reuse the array8 helper and scale the
                    resulting address by the element size (<<1 / <<2). */
4827                 case 0x010: /* VIS I array8 */
4828                     CHECK_FPU_FEATURE(dc, VIS1);
4829                     cpu_src1 = gen_load_gpr(dc, rs1);
4830                     cpu_src2 = gen_load_gpr(dc, rs2);
4831                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4832                     gen_store_gpr(dc, rd, cpu_dst);
4833                     break;
4834                 case 0x012: /* VIS I array16 */
4835                     CHECK_FPU_FEATURE(dc, VIS1);
4836                     cpu_src1 = gen_load_gpr(dc, rs1);
4837                     cpu_src2 = gen_load_gpr(dc, rs2);
4838                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4839                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4840                     gen_store_gpr(dc, rd, cpu_dst);
4841                     break;
4842                 case 0x014: /* VIS I array32 */
4843                     CHECK_FPU_FEATURE(dc, VIS1);
4844                     cpu_src1 = gen_load_gpr(dc, rs1);
4845                     cpu_src2 = gen_load_gpr(dc, rs2);
4846                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4847                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4848                     gen_store_gpr(dc, rd, cpu_dst);
4849                     break;
                 /* alignaddr/alignaddrl share gen_alignaddr; the last arg
                    distinguishes the little-endian ('l') form. */
4850                 case 0x018: /* VIS I alignaddr */
4851                     CHECK_FPU_FEATURE(dc, VIS1);
4852                     cpu_src1 = gen_load_gpr(dc, rs1);
4853                     cpu_src2 = gen_load_gpr(dc, rs2);
4854                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4855                     gen_store_gpr(dc, rd, cpu_dst);
4856                     break;
4857                 case 0x01a: /* VIS I alignaddrl */
4858                     CHECK_FPU_FEATURE(dc, VIS1);
4859                     cpu_src1 = gen_load_gpr(dc, rs1);
4860                     cpu_src2 = gen_load_gpr(dc, rs2);
4861                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4862                     gen_store_gpr(dc, rd, cpu_dst);
4863                     break;
4864                 case 0x019: /* VIS II bmask */
4865                     CHECK_FPU_FEATURE(dc, VIS2);
4866                     cpu_src1 = gen_load_gpr(dc, rs1);
4867                     cpu_src2 = gen_load_gpr(dc, rs2);
                     /* bmask: rd := rs1 + rs2, and the sum is also deposited
                        into the upper 32 bits of %gsr (the mask field used
                        later by bshuffle). */
4868                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4869                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4870                     gen_store_gpr(dc, rd, cpu_dst);
4871                     break;
                 /* VIS partitioned compares: operate on 64-bit FP register
                    pairs (gen_load_fpr_D) but deliver their per-element
                    result mask into an integer destination register rd. */
4872                 case 0x020: /* VIS I fcmple16 */
4873                     CHECK_FPU_FEATURE(dc, VIS1);
4874                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4875                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4876                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4877                     gen_store_gpr(dc, rd, cpu_dst);
4878                     break;
4879                 case 0x022: /* VIS I fcmpne16 */
4880                     CHECK_FPU_FEATURE(dc, VIS1);
4881                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4882                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4883                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4884                     gen_store_gpr(dc, rd, cpu_dst);
4885                     break;
4886                 case 0x024: /* VIS I fcmple32 */
4887                     CHECK_FPU_FEATURE(dc, VIS1);
4888                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4889                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4890                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4891                     gen_store_gpr(dc, rd, cpu_dst);
4892                     break;
4893                 case 0x026: /* VIS I fcmpne32 */
4894                     CHECK_FPU_FEATURE(dc, VIS1);
4895                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4896                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4897                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4898                     gen_store_gpr(dc, rd, cpu_dst);
4899                     break;
4900                 case 0x028: /* VIS I fcmpgt16 */
4901                     CHECK_FPU_FEATURE(dc, VIS1);
4902                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4903                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4904                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4905                     gen_store_gpr(dc, rd, cpu_dst);
4906                     break;
4907                 case 0x02a: /* VIS I fcmpeq16 */
4908                     CHECK_FPU_FEATURE(dc, VIS1);
4909                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4910                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4911                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4912                     gen_store_gpr(dc, rd, cpu_dst);
4913                     break;
4914                 case 0x02c: /* VIS I fcmpgt32 */
4915                     CHECK_FPU_FEATURE(dc, VIS1);
4916                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4917                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4918                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4919                     gen_store_gpr(dc, rd, cpu_dst);
4920                     break;
4921                 case 0x02e: /* VIS I fcmpeq32 */
4922                     CHECK_FPU_FEATURE(dc, VIS1);
4923                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4924                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4925                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4926                     gen_store_gpr(dc, rd, cpu_dst);
4927                     break;
                 /* Partitioned multiplies and pack/merge ops.  The gen_*_DDD
                    wrappers load/store 64-bit FP regs around the helper;
                    gen_gsr_fop_DDD additionally passes %gsr (ops whose
                    result depends on GSR.align/scale, e.g. fpack32,
                    faligndata, bshuffle). */
4928                 case 0x031: /* VIS I fmul8x16 */
4929                     CHECK_FPU_FEATURE(dc, VIS1);
4930                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4931                     break;
4932                 case 0x033: /* VIS I fmul8x16au */
4933                     CHECK_FPU_FEATURE(dc, VIS1);
4934                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4935                     break;
4936                 case 0x035: /* VIS I fmul8x16al */
4937                     CHECK_FPU_FEATURE(dc, VIS1);
4938                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4939                     break;
4940                 case 0x036: /* VIS I fmul8sux16 */
4941                     CHECK_FPU_FEATURE(dc, VIS1);
4942                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4943                     break;
4944                 case 0x037: /* VIS I fmul8ulx16 */
4945                     CHECK_FPU_FEATURE(dc, VIS1);
4946                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4947                     break;
4948                 case 0x038: /* VIS I fmuld8sux16 */
4949                     CHECK_FPU_FEATURE(dc, VIS1);
4950                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4951                     break;
4952                 case 0x039: /* VIS I fmuld8ulx16 */
4953                     CHECK_FPU_FEATURE(dc, VIS1);
4954                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4955                     break;
4956                 case 0x03a: /* VIS I fpack32 */
4957                     CHECK_FPU_FEATURE(dc, VIS1);
4958                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4959                     break;
                 /* fpack16/fpackfix take a 64-bit source (from rs2 only)
                    and produce a 32-bit single-reg result. */
4960                 case 0x03b: /* VIS I fpack16 */
4961                     CHECK_FPU_FEATURE(dc, VIS1);
4962                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4963                     cpu_dst_32 = gen_dest_fpr_F(dc);
4964                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4965                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4966                     break;
4967                 case 0x03d: /* VIS I fpackfix */
4968                     CHECK_FPU_FEATURE(dc, VIS1);
4969                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4970                     cpu_dst_32 = gen_dest_fpr_F(dc);
4971                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4972                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4973                     break;
                 /* pdist accumulates into rd, hence the DDDD wrapper which
                    also passes the old rd value to the helper. */
4974                 case 0x03e: /* VIS I pdist */
4975                     CHECK_FPU_FEATURE(dc, VIS1);
4976                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4977                     break;
4978                 case 0x048: /* VIS I faligndata */
4979                     CHECK_FPU_FEATURE(dc, VIS1);
4980                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4981                     break;
4982                 case 0x04b: /* VIS I fpmerge */
4983                     CHECK_FPU_FEATURE(dc, VIS1);
4984                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4985                     break;
4986                 case 0x04c: /* VIS II bshuffle */
4987                     CHECK_FPU_FEATURE(dc, VIS2);
4988                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4989                     break;
4990                 case 0x04d: /* VIS I fexpand */
4991                     CHECK_FPU_FEATURE(dc, VIS1);
4992                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4993                     break;
4994                 case 0x050: /* VIS I fpadd16 */
4995                     CHECK_FPU_FEATURE(dc, VIS1);
4996                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4997                     break;
4998                 case 0x051: /* VIS I fpadd16s */
4999                     CHECK_FPU_FEATURE(dc, VIS1);
5000                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
5001                     break;
5002                 case 0x052: /* VIS I fpadd32 */
5003                     CHECK_FPU_FEATURE(dc, VIS1);
5004                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
5005                     break;
5006                 case 0x053: /* VIS I fpadd32s */
5007                     CHECK_FPU_FEATURE(dc, VIS1);
5008                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5009                     break;
5010                 case 0x054: /* VIS I fpsub16 */
5011                     CHECK_FPU_FEATURE(dc, VIS1);
5012                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5013                     break;
5014                 case 0x055: /* VIS I fpsub16s */
5015                     CHECK_FPU_FEATURE(dc, VIS1);
5016                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5017                     break;
5018                 case 0x056: /* VIS I fpsub32 */
5019                     CHECK_FPU_FEATURE(dc, VIS1);
5020                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5021                     break;
5022                 case 0x057: /* VIS I fpsub32s */
5023                     CHECK_FPU_FEATURE(dc, VIS1);
5024                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5025                     break;
5026                 case 0x060: /* VIS I fzero */
5027                     CHECK_FPU_FEATURE(dc, VIS1);
5028                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5029                     tcg_gen_movi_i64(cpu_dst_64, 0);
5030                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5031                     break;
5032                 case 0x061: /* VIS I fzeros */
5033                     CHECK_FPU_FEATURE(dc, VIS1);
5034                     cpu_dst_32 = gen_dest_fpr_F(dc);
5035                     tcg_gen_movi_i32(cpu_dst_32, 0);
5036                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5037                     break;
5038                 case 0x062: /* VIS I fnor */
5039                     CHECK_FPU_FEATURE(dc, VIS1);
5040                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5041                     break;
5042                 case 0x063: /* VIS I fnors */
5043                     CHECK_FPU_FEATURE(dc, VIS1);
5044                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5045                     break;
5046                 case 0x064: /* VIS I fandnot2 */
5047                     CHECK_FPU_FEATURE(dc, VIS1);
5048                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5049                     break;
5050                 case 0x065: /* VIS I fandnot2s */
5051                     CHECK_FPU_FEATURE(dc, VIS1);
5052                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5053                     break;
5054                 case 0x066: /* VIS I fnot2 */
5055                     CHECK_FPU_FEATURE(dc, VIS1);
5056                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5057                     break;
5058                 case 0x067: /* VIS I fnot2s */
5059                     CHECK_FPU_FEATURE(dc, VIS1);
5060                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5061                     break;
5062                 case 0x068: /* VIS I fandnot1 */
5063                     CHECK_FPU_FEATURE(dc, VIS1);
5064                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5065                     break;
5066                 case 0x069: /* VIS I fandnot1s */
5067                     CHECK_FPU_FEATURE(dc, VIS1);
5068                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5069                     break;
5070                 case 0x06a: /* VIS I fnot1 */
5071                     CHECK_FPU_FEATURE(dc, VIS1);
5072                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5073                     break;
5074                 case 0x06b: /* VIS I fnot1s */
5075                     CHECK_FPU_FEATURE(dc, VIS1);
5076                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5077                     break;
5078                 case 0x06c: /* VIS I fxor */
5079                     CHECK_FPU_FEATURE(dc, VIS1);
5080                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5081                     break;
5082                 case 0x06d: /* VIS I fxors */
5083                     CHECK_FPU_FEATURE(dc, VIS1);
5084                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5085                     break;
5086                 case 0x06e: /* VIS I fnand */
5087                     CHECK_FPU_FEATURE(dc, VIS1);
5088                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5089                     break;
5090                 case 0x06f: /* VIS I fnands */
5091                     CHECK_FPU_FEATURE(dc, VIS1);
5092                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5093                     break;
5094                 case 0x070: /* VIS I fand */
5095                     CHECK_FPU_FEATURE(dc, VIS1);
5096                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5097                     break;
5098                 case 0x071: /* VIS I fands */
5099                     CHECK_FPU_FEATURE(dc, VIS1);
5100                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5101                     break;
5102                 case 0x072: /* VIS I fxnor */
5103                     CHECK_FPU_FEATURE(dc, VIS1);
5104                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5105                     break;
5106                 case 0x073: /* VIS I fxnors */
5107                     CHECK_FPU_FEATURE(dc, VIS1);
5108                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5109                     break;
5110                 case 0x074: /* VIS I fsrc1 */
5111                     CHECK_FPU_FEATURE(dc, VIS1);
5112                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5113                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5114                     break;
5115                 case 0x075: /* VIS I fsrc1s */
5116                     CHECK_FPU_FEATURE(dc, VIS1);
5117                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5118                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5119                     break;
5120                 case 0x076: /* VIS I fornot2 */
5121                     CHECK_FPU_FEATURE(dc, VIS1);
5122                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5123                     break;
5124                 case 0x077: /* VIS I fornot2s */
5125                     CHECK_FPU_FEATURE(dc, VIS1);
5126                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5127                     break;
5128                 case 0x078: /* VIS I fsrc2 */
5129                     CHECK_FPU_FEATURE(dc, VIS1);
5130                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5131                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5132                     break;
5133                 case 0x079: /* VIS I fsrc2s */
5134                     CHECK_FPU_FEATURE(dc, VIS1);
5135                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5136                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5137                     break;
5138                 case 0x07a: /* VIS I fornot1 */
5139                     CHECK_FPU_FEATURE(dc, VIS1);
5140                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5141                     break;
5142                 case 0x07b: /* VIS I fornot1s */
5143                     CHECK_FPU_FEATURE(dc, VIS1);
5144                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5145                     break;
5146                 case 0x07c: /* VIS I for */
5147                     CHECK_FPU_FEATURE(dc, VIS1);
5148                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5149                     break;
5150                 case 0x07d: /* VIS I fors */
5151                     CHECK_FPU_FEATURE(dc, VIS1);
5152                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5153                     break;
5154                 case 0x07e: /* VIS I fone */
5155                     CHECK_FPU_FEATURE(dc, VIS1);
5156                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5157                     tcg_gen_movi_i64(cpu_dst_64, -1);
5158                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5159                     break;
5160                 case 0x07f: /* VIS I fones */
5161                     CHECK_FPU_FEATURE(dc, VIS1);
5162                     cpu_dst_32 = gen_dest_fpr_F(dc);
5163                     tcg_gen_movi_i32(cpu_dst_32, -1);
5164                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5165                     break;
5166                 case 0x080: /* VIS I shutdown */
5167                 case 0x081: /* VIS II siam */
5168                     // XXX
5169                     goto illegal_insn;
5170                 default:
5171                     goto illegal_insn;
5172                 }
5173 #else
5174                 goto ncp_insn;
5175 #endif
5176             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5177 #ifdef TARGET_SPARC64
5178                 goto illegal_insn;
5179 #else
5180                 goto ncp_insn;
5181 #endif
5182 #ifdef TARGET_SPARC64
5183             } else if (xop == 0x39) { /* V9 return */
5184                 save_state(dc);
5185                 cpu_src1 = get_src1(dc, insn);
5186                 cpu_tmp0 = get_temp_tl(dc);
5187                 if (IS_IMM) {   /* immediate */
5188                     simm = GET_FIELDs(insn, 19, 31);
5189                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5190                 } else {                /* register */
5191                     rs2 = GET_FIELD(insn, 27, 31);
5192                     if (rs2) {
5193                         cpu_src2 = gen_load_gpr(dc, rs2);
5194                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5195                     } else {
5196                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5197                     }
5198                 }
5199                 gen_helper_restore(cpu_env);
5200                 gen_mov_pc_npc(dc);
5201                 gen_check_align(cpu_tmp0, 3);
5202                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5203                 dc->npc = DYNAMIC_PC;
5204                 goto jmp_insn;
5205 #endif
5206             } else {
5207                 cpu_src1 = get_src1(dc, insn);
5208                 cpu_tmp0 = get_temp_tl(dc);
5209                 if (IS_IMM) {   /* immediate */
5210                     simm = GET_FIELDs(insn, 19, 31);
5211                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5212                 } else {                /* register */
5213                     rs2 = GET_FIELD(insn, 27, 31);
5214                     if (rs2) {
5215                         cpu_src2 = gen_load_gpr(dc, rs2);
5216                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5217                     } else {
5218                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5219                     }
5220                 }
5221                 switch (xop) {
5222                 case 0x38:      /* jmpl */
5223                     {
5224                         TCGv t = gen_dest_gpr(dc, rd);
5225                         tcg_gen_movi_tl(t, dc->pc);
5226                         gen_store_gpr(dc, rd, t);
5227 
5228                         gen_mov_pc_npc(dc);
5229                         gen_check_align(cpu_tmp0, 3);
5230                         gen_address_mask(dc, cpu_tmp0);
5231                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5232                         dc->npc = DYNAMIC_PC;
5233                     }
5234                     goto jmp_insn;
5235 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5236                 case 0x39:      /* rett, V9 return */
5237                     {
5238                         if (!supervisor(dc))
5239                             goto priv_insn;
5240                         gen_mov_pc_npc(dc);
5241                         gen_check_align(cpu_tmp0, 3);
5242                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5243                         dc->npc = DYNAMIC_PC;
5244                         gen_helper_rett(cpu_env);
5245                     }
5246                     goto jmp_insn;
5247 #endif
5248                 case 0x3b: /* flush */
5249                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5250                         goto unimp_flush;
5251                     /* nop */
5252                     break;
5253                 case 0x3c:      /* save */
5254                     gen_helper_save(cpu_env);
5255                     gen_store_gpr(dc, rd, cpu_tmp0);
5256                     break;
5257                 case 0x3d:      /* restore */
5258                     gen_helper_restore(cpu_env);
5259                     gen_store_gpr(dc, rd, cpu_tmp0);
5260                     break;
5261 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5262                 case 0x3e:      /* V9 done/retry */
5263                     {
5264                         switch (rd) {
5265                         case 0:
5266                             if (!supervisor(dc))
5267                                 goto priv_insn;
5268                             dc->npc = DYNAMIC_PC;
5269                             dc->pc = DYNAMIC_PC;
5270                             gen_helper_done(cpu_env);
5271                             goto jmp_insn;
5272                         case 1:
5273                             if (!supervisor(dc))
5274                                 goto priv_insn;
5275                             dc->npc = DYNAMIC_PC;
5276                             dc->pc = DYNAMIC_PC;
5277                             gen_helper_retry(cpu_env);
5278                             goto jmp_insn;
5279                         default:
5280                             goto illegal_insn;
5281                         }
5282                     }
5283                     break;
5284 #endif
5285                 default:
5286                     goto illegal_insn;
5287                 }
5288             }
5289             break;
5290         }
5291         break;
5292     case 3:                     /* load/store instructions */
5293         {
5294             unsigned int xop = GET_FIELD(insn, 7, 12);
5295             /* ??? gen_address_mask prevents us from using a source
5296                register directly.  Always generate a temporary.  */
5297             TCGv cpu_addr = get_temp_tl(dc);
5298 
5299             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5300             if (xop == 0x3c || xop == 0x3e) {
5301                 /* V9 casa/casxa : no offset */
5302             } else if (IS_IMM) {     /* immediate */
5303                 simm = GET_FIELDs(insn, 19, 31);
5304                 if (simm != 0) {
5305                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5306                 }
5307             } else {            /* register */
5308                 rs2 = GET_FIELD(insn, 27, 31);
5309                 if (rs2 != 0) {
5310                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5311                 }
5312             }
5313             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5314                 (xop > 0x17 && xop <= 0x1d ) ||
5315                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5316                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5317 
5318                 switch (xop) {
5319                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5320                     gen_address_mask(dc, cpu_addr);
5321                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5322                     break;
5323                 case 0x1:       /* ldub, load unsigned byte */
5324                     gen_address_mask(dc, cpu_addr);
5325                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5326                     break;
5327                 case 0x2:       /* lduh, load unsigned halfword */
5328                     gen_address_mask(dc, cpu_addr);
5329                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5330                     break;
5331                 case 0x3:       /* ldd, load double word */
5332                     if (rd & 1)
5333                         goto illegal_insn;
5334                     else {
5335                         TCGv_i64 t64;
5336 
5337                         gen_address_mask(dc, cpu_addr);
5338                         t64 = tcg_temp_new_i64();
5339                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5340                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5341                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5342                         gen_store_gpr(dc, rd + 1, cpu_val);
5343                         tcg_gen_shri_i64(t64, t64, 32);
5344                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5345                         tcg_temp_free_i64(t64);
5346                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5347                     }
5348                     break;
5349                 case 0x9:       /* ldsb, load signed byte */
5350                     gen_address_mask(dc, cpu_addr);
5351                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5352                     break;
5353                 case 0xa:       /* ldsh, load signed halfword */
5354                     gen_address_mask(dc, cpu_addr);
5355                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5356                     break;
5357                 case 0xd:       /* ldstub */
5358                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5359                     break;
5360                 case 0x0f:
5361                     /* swap, swap register with memory. Also atomically */
5362                     CHECK_IU_FEATURE(dc, SWAP);
5363                     cpu_src1 = gen_load_gpr(dc, rd);
5364                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5365                              dc->mem_idx, MO_TEUL);
5366                     break;
5367 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5368                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5369                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5370                     break;
5371                 case 0x11:      /* lduba, load unsigned byte alternate */
5372                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5373                     break;
5374                 case 0x12:      /* lduha, load unsigned halfword alternate */
5375                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5376                     break;
5377                 case 0x13:      /* ldda, load double word alternate */
5378                     if (rd & 1) {
5379                         goto illegal_insn;
5380                     }
5381                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5382                     goto skip_move;
5383                 case 0x19:      /* ldsba, load signed byte alternate */
5384                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5385                     break;
5386                 case 0x1a:      /* ldsha, load signed halfword alternate */
5387                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5388                     break;
5389                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5390                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5391                     break;
5392                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5393                                    atomically */
5394                     CHECK_IU_FEATURE(dc, SWAP);
5395                     cpu_src1 = gen_load_gpr(dc, rd);
5396                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5397                     break;
5398 
5399 #ifndef TARGET_SPARC64
5400                 case 0x30: /* ldc */
5401                 case 0x31: /* ldcsr */
5402                 case 0x33: /* lddc */
5403                     goto ncp_insn;
5404 #endif
5405 #endif
5406 #ifdef TARGET_SPARC64
5407                 case 0x08: /* V9 ldsw */
5408                     gen_address_mask(dc, cpu_addr);
5409                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5410                     break;
5411                 case 0x0b: /* V9 ldx */
5412                     gen_address_mask(dc, cpu_addr);
5413                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5414                     break;
5415                 case 0x18: /* V9 ldswa */
5416                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5417                     break;
5418                 case 0x1b: /* V9 ldxa */
5419                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5420                     break;
5421                 case 0x2d: /* V9 prefetch, no effect */
5422                     goto skip_move;
5423                 case 0x30: /* V9 ldfa */
5424                     if (gen_trap_ifnofpu(dc)) {
5425                         goto jmp_insn;
5426                     }
5427                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5428                     gen_update_fprs_dirty(dc, rd);
5429                     goto skip_move;
5430                 case 0x33: /* V9 lddfa */
5431                     if (gen_trap_ifnofpu(dc)) {
5432                         goto jmp_insn;
5433                     }
5434                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5435                     gen_update_fprs_dirty(dc, DFPREG(rd));
5436                     goto skip_move;
5437                 case 0x3d: /* V9 prefetcha, no effect */
5438                     goto skip_move;
5439                 case 0x32: /* V9 ldqfa */
5440                     CHECK_FPU_FEATURE(dc, FLOAT128);
5441                     if (gen_trap_ifnofpu(dc)) {
5442                         goto jmp_insn;
5443                     }
5444                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5445                     gen_update_fprs_dirty(dc, QFPREG(rd));
5446                     goto skip_move;
5447 #endif
5448                 default:
5449                     goto illegal_insn;
5450                 }
5451                 gen_store_gpr(dc, rd, cpu_val);
5452 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5453             skip_move: ;
5454 #endif
5455             } else if (xop >= 0x20 && xop < 0x24) {
5456                 if (gen_trap_ifnofpu(dc)) {
5457                     goto jmp_insn;
5458                 }
5459                 switch (xop) {
5460                 case 0x20:      /* ldf, load fpreg */
5461                     gen_address_mask(dc, cpu_addr);
5462                     cpu_dst_32 = gen_dest_fpr_F(dc);
5463                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5464                                         dc->mem_idx, MO_TEUL);
5465                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5466                     break;
5467                 case 0x21:      /* ldfsr, V9 ldxfsr */
5468 #ifdef TARGET_SPARC64
5469                     gen_address_mask(dc, cpu_addr);
5470                     if (rd == 1) {
5471                         TCGv_i64 t64 = tcg_temp_new_i64();
5472                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5473                                             dc->mem_idx, MO_TEQ);
5474                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5475                         tcg_temp_free_i64(t64);
5476                         break;
5477                     }
5478 #endif
5479                     cpu_dst_32 = get_temp_i32(dc);
5480                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5481                                         dc->mem_idx, MO_TEUL);
5482                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5483                     break;
5484                 case 0x22:      /* ldqf, load quad fpreg */
5485                     CHECK_FPU_FEATURE(dc, FLOAT128);
5486                     gen_address_mask(dc, cpu_addr);
5487                     cpu_src1_64 = tcg_temp_new_i64();
5488                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5489                                         MO_TEQ | MO_ALIGN_4);
5490                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5491                     cpu_src2_64 = tcg_temp_new_i64();
5492                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5493                                         MO_TEQ | MO_ALIGN_4);
5494                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5495                     tcg_temp_free_i64(cpu_src1_64);
5496                     tcg_temp_free_i64(cpu_src2_64);
5497                     break;
5498                 case 0x23:      /* lddf, load double fpreg */
5499                     gen_address_mask(dc, cpu_addr);
5500                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5501                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5502                                         MO_TEQ | MO_ALIGN_4);
5503                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5504                     break;
5505                 default:
5506                     goto illegal_insn;
5507                 }
5508             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5509                        xop == 0xe || xop == 0x1e) {
5510                 TCGv cpu_val = gen_load_gpr(dc, rd);
5511 
5512                 switch (xop) {
5513                 case 0x4: /* st, store word */
5514                     gen_address_mask(dc, cpu_addr);
5515                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5516                     break;
5517                 case 0x5: /* stb, store byte */
5518                     gen_address_mask(dc, cpu_addr);
5519                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5520                     break;
5521                 case 0x6: /* sth, store halfword */
5522                     gen_address_mask(dc, cpu_addr);
5523                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5524                     break;
5525                 case 0x7: /* std, store double word */
5526                     if (rd & 1)
5527                         goto illegal_insn;
5528                     else {
5529                         TCGv_i64 t64;
5530                         TCGv lo;
5531 
5532                         gen_address_mask(dc, cpu_addr);
5533                         lo = gen_load_gpr(dc, rd + 1);
5534                         t64 = tcg_temp_new_i64();
5535                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5536                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5537                         tcg_temp_free_i64(t64);
5538                     }
5539                     break;
5540 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5541                 case 0x14: /* sta, V9 stwa, store word alternate */
5542                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5543                     break;
5544                 case 0x15: /* stba, store byte alternate */
5545                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5546                     break;
5547                 case 0x16: /* stha, store halfword alternate */
5548                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5549                     break;
5550                 case 0x17: /* stda, store double word alternate */
5551                     if (rd & 1) {
5552                         goto illegal_insn;
5553                     }
5554                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5555                     break;
5556 #endif
5557 #ifdef TARGET_SPARC64
5558                 case 0x0e: /* V9 stx */
5559                     gen_address_mask(dc, cpu_addr);
5560                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5561                     break;
5562                 case 0x1e: /* V9 stxa */
5563                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5564                     break;
5565 #endif
5566                 default:
5567                     goto illegal_insn;
5568                 }
5569             } else if (xop > 0x23 && xop < 0x28) {
5570                 if (gen_trap_ifnofpu(dc)) {
5571                     goto jmp_insn;
5572                 }
5573                 switch (xop) {
5574                 case 0x24: /* stf, store fpreg */
5575                     gen_address_mask(dc, cpu_addr);
5576                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5577                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5578                                         dc->mem_idx, MO_TEUL);
5579                     break;
5580                 case 0x25: /* stfsr, V9 stxfsr */
5581                     {
5582 #ifdef TARGET_SPARC64
5583                         gen_address_mask(dc, cpu_addr);
5584                         if (rd == 1) {
5585                             tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5586                             break;
5587                         }
5588 #endif
5589                         tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5590                     }
5591                     break;
5592                 case 0x26:
5593 #ifdef TARGET_SPARC64
5594                     /* V9 stqf, store quad fpreg */
5595                     CHECK_FPU_FEATURE(dc, FLOAT128);
5596                     gen_address_mask(dc, cpu_addr);
5597                     /* ??? While stqf only requires 4-byte alignment, it is
5598                        legal for the cpu to signal the unaligned exception.
5599                        The OS trap handler is then required to fix it up.
5600                        For qemu, this avoids having to probe the second page
5601                        before performing the first write.  */
5602                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5603                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5604                                         dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5605                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5606                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5607                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5608                                         dc->mem_idx, MO_TEQ);
5609                     break;
5610 #else /* !TARGET_SPARC64 */
5611                     /* stdfq, store floating point queue */
5612 #if defined(CONFIG_USER_ONLY)
5613                     goto illegal_insn;
5614 #else
5615                     if (!supervisor(dc))
5616                         goto priv_insn;
5617                     if (gen_trap_ifnofpu(dc)) {
5618                         goto jmp_insn;
5619                     }
5620                     goto nfq_insn;
5621 #endif
5622 #endif
5623                 case 0x27: /* stdf, store double fpreg */
5624                     gen_address_mask(dc, cpu_addr);
5625                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5626                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5627                                         MO_TEQ | MO_ALIGN_4);
5628                     break;
5629                 default:
5630                     goto illegal_insn;
5631                 }
5632             } else if (xop > 0x33 && xop < 0x3f) {
5633                 switch (xop) {
5634 #ifdef TARGET_SPARC64
5635                 case 0x34: /* V9 stfa */
5636                     if (gen_trap_ifnofpu(dc)) {
5637                         goto jmp_insn;
5638                     }
5639                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5640                     break;
5641                 case 0x36: /* V9 stqfa */
5642                     {
5643                         CHECK_FPU_FEATURE(dc, FLOAT128);
5644                         if (gen_trap_ifnofpu(dc)) {
5645                             goto jmp_insn;
5646                         }
5647                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5648                     }
5649                     break;
5650                 case 0x37: /* V9 stdfa */
5651                     if (gen_trap_ifnofpu(dc)) {
5652                         goto jmp_insn;
5653                     }
5654                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5655                     break;
5656                 case 0x3e: /* V9 casxa */
5657                     rs2 = GET_FIELD(insn, 27, 31);
5658                     cpu_src2 = gen_load_gpr(dc, rs2);
5659                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5660                     break;
5661 #else
5662                 case 0x34: /* stc */
5663                 case 0x35: /* stcsr */
5664                 case 0x36: /* stdcq */
5665                 case 0x37: /* stdc */
5666                     goto ncp_insn;
5667 #endif
5668 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5669                 case 0x3c: /* V9 or LEON3 casa */
5670 #ifndef TARGET_SPARC64
5671                     CHECK_IU_FEATURE(dc, CASA);
5672 #endif
5673                     rs2 = GET_FIELD(insn, 27, 31);
5674                     cpu_src2 = gen_load_gpr(dc, rs2);
5675                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5676                     break;
5677 #endif
5678                 default:
5679                     goto illegal_insn;
5680                 }
5681             } else {
5682                 goto illegal_insn;
5683             }
5684         }
5685         break;
5686     }
5687     /* default case for non jump instructions */
5688     if (dc->npc == DYNAMIC_PC) {
5689         dc->pc = DYNAMIC_PC;
5690         gen_op_next_insn();
5691     } else if (dc->npc == JUMP_PC) {
5692         /* we can do a static jump */
5693         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5694         dc->is_br = 1;
5695     } else {
5696         dc->pc = dc->npc;
5697         dc->npc = dc->npc + 4;
5698     }
5699  jmp_insn:
5700     goto egress;
5701  illegal_insn:
5702     gen_exception(dc, TT_ILL_INSN);
5703     goto egress;
5704  unimp_flush:
5705     gen_exception(dc, TT_UNIMP_FLUSH);
5706     goto egress;
5707 #if !defined(CONFIG_USER_ONLY)
5708  priv_insn:
5709     gen_exception(dc, TT_PRIV_INSN);
5710     goto egress;
5711 #endif
5712  nfpu_insn:
5713     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5714     goto egress;
5715 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5716  nfq_insn:
5717     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5718     goto egress;
5719 #endif
5720 #ifndef TARGET_SPARC64
5721  ncp_insn:
5722     gen_exception(dc, TT_NCP_INSN);
5723     goto egress;
5724 #endif
5725  egress:
5726     if (dc->n_t32 != 0) {
5727         int i;
5728         for (i = dc->n_t32 - 1; i >= 0; --i) {
5729             tcg_temp_free_i32(dc->t32[i]);
5730         }
5731         dc->n_t32 = 0;
5732     }
5733     if (dc->n_ttl != 0) {
5734         int i;
5735         for (i = dc->n_ttl - 1; i >= 0; --i) {
5736             tcg_temp_free(dc->ttl[i]);
5737         }
5738         dc->n_ttl = 0;
5739     }
5740 }
5741 
/* Translate a block of guest SPARC instructions starting at tb->pc into
   TCG ops.  Fills in tb->size and tb->icount on return.  SPARC carries a
   PC/NPC pair (delay slots); the NPC for the block entry travels in
   tb->cs_base.  */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    SPARCCPU *cpu = sparc_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start, last_pc;
    DisasContext dc1, *dc = &dc1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    /* Seed the disassembly context from the TB flags.  */
    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* NPC of the first instruction is passed in cs_base.  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (cs->singlestep_enabled || singlestep);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    dc->asi = (tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif

    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    do {
        /* Record (pc, npc) for restore_state_to_opc.  When NPC is only
           known to be one of two branch targets, encode the taken target
           with the JUMP_PC tag in the low bits; the not-taken target is
           implicitly pc + 4 (asserted below) and is reconstructed from
           cpu 'cond' at restore time.  */
        if (dc->npc & JUMP_PC) {
            assert(dc->jump_pc[1] == dc->pc + 4);
            tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
        } else {
            tcg_gen_insn_start(dc->pc, dc->npc);
        }
        num_insns++;
        last_pc = dc->pc;

        /* Stop and drop to the debugger on a breakpoint hit.  */
        if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
            if (dc->pc != pc_start) {
                save_state(dc);
            }
            gen_helper_debug(cpu_env);
            tcg_gen_exit_tb(0);
            dc->is_br = 1;
            goto exit_gen_loop;
        }

        /* icount: the last instruction of the TB may perform I/O, so
           bracket it with gen_io_start/gen_io_end.  */
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        insn = cpu_ldl_code(env, dc->pc);

        disas_sparc_insn(dc, insn);

        /* A branch/trap already closed the TB.  */
        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while (!tcg_op_buf_full() &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    /* If the TB fell off the end (no branch), emit the exit ourselves.  */
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* Dynamic PC/NPC: flush known state and return to the loop.  */
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_tb_end(tb, num_insns);

    /* Guest bytes covered: from pc_start through the last insn's 4 bytes.  */
    tb->size = last_pc + 4 - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}
5865 
5866 void gen_intermediate_code_init(CPUSPARCState *env)
5867 {
5868     static int inited;
5869     static const char gregnames[32][4] = {
5870         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5871         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5872         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5873         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5874     };
5875     static const char fregnames[32][4] = {
5876         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5877         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5878         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5879         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5880     };
5881 
5882     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5883 #ifdef TARGET_SPARC64
5884         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5885         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5886 #else
5887         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5888 #endif
5889         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5890         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5891     };
5892 
5893     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5894 #ifdef TARGET_SPARC64
5895         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5896         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5897         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5898         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5899           "hstick_cmpr" },
5900         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5901         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5902         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5903         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5904         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5905 #endif
5906         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5907         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5908         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5909         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5910         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5911         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5912         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5913         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5914 #ifndef CONFIG_USER_ONLY
5915         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5916 #endif
5917     };
5918 
5919     unsigned int i;
5920 
5921     /* init various static tables */
5922     if (inited) {
5923         return;
5924     }
5925     inited = 1;
5926 
5927     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5928     tcg_ctx.tcg_env = cpu_env;
5929 
5930     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5931                                          offsetof(CPUSPARCState, regwptr),
5932                                          "regwptr");
5933 
5934     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5935         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5936     }
5937 
5938     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5939         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5940     }
5941 
5942     TCGV_UNUSED(cpu_regs[0]);
5943     for (i = 1; i < 8; ++i) {
5944         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5945                                          offsetof(CPUSPARCState, gregs[i]),
5946                                          gregnames[i]);
5947     }
5948 
5949     for (i = 8; i < 32; ++i) {
5950         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5951                                          (i - 8) * sizeof(target_ulong),
5952                                          gregnames[i]);
5953     }
5954 
5955     for (i = 0; i < TARGET_DPREGS; i++) {
5956         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5957                                             offsetof(CPUSPARCState, fpr[i]),
5958                                             fregnames[i]);
5959     }
5960 }
5961 
5962 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5963                           target_ulong *data)
5964 {
5965     target_ulong pc = data[0];
5966     target_ulong npc = data[1];
5967 
5968     env->pc = pc;
5969     if (npc == DYNAMIC_PC) {
5970         /* dynamic NPC: already stored */
5971     } else if (npc & JUMP_PC) {
5972         /* jump PC: use 'cond' and the jump targets of the translation */
5973         if (env->cond) {
5974             env->npc = npc & ~3;
5975         } else {
5976             env->npc = pc + 4;
5977         }
5978     } else {
5979         env->npc = npc;
5980     }
5981 }
5982