xref: /openbmc/qemu/target/sparc/translate.c (revision 0b1183e3)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "trace-tcg.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 
37 #define DEBUG_DISAS
38 
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value which takes only two values
41                          according to jump_pc[T2] */
42 
/* global register indexes */
static TCGv_env cpu_env;
static TCGv_ptr cpu_regwptr;
/* Inputs and result of the most recent flag-setting op, used for lazy
   condition-code evaluation together with cpu_cc_op (a CC_OP_* value).  */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
/* Branch condition value, consumed by the delayed-branch machinery.  */
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers, packed two single-precision per i64.  */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66 
67 #include "exec/gen-icount.h"
68 
69 typedef struct DisasContext {
70     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
71     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
72     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
73     int is_br;
74     int mem_idx;
75     bool fpu_enabled;
76     bool address_mask_32bit;
77     bool singlestep;
78 #ifndef CONFIG_USER_ONLY
79     bool supervisor;
80 #ifdef TARGET_SPARC64
81     bool hypervisor;
82 #endif
83 #endif
84 
85     uint32_t cc_op;  /* current CC operation */
86     struct TranslationBlock *tb;
87     sparc_def_t *def;
88     TCGv_i32 t32[3];
89     TCGv ttl[5];
90     int n_t32;
91     int n_ttl;
92 #ifdef TARGET_SPARC64
93     int fprs_dirty;
94     int asi;
95 #endif
96 } DisasContext;
97 
98 typedef struct {
99     TCGCond cond;
100     bool is_bool;
101     bool g1, g2;
102     TCGv c1, c2;
103 } DisasCompare;
104 
/* Extract bits FROM..TO of X using non-native (big-endian) bit order:
   bit 0 is the most significant bit, as in instruction format diagrams.  */
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

/* Same extraction using the order in the manuals, i.e. bit 0 is 2^0.  */
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

/* Map an architectural double/quad FP register number onto the flat
   register file; on sparc64 bit 0 of the encoding selects the upper bank.  */
#ifdef TARGET_SPARC64
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Software-trap number masks for UA2005 hyper-traps and SPARCv8 traps.  */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
126 
/*
 * Sign-extend the low LEN bits of X to a full signed int.
 *
 * The previous implementation computed (x << (32 - len)) >> (32 - len),
 * which left-shifts a possibly-negative signed value (undefined behavior,
 * C11 6.5.7p4) and relies on an arithmetic right shift of a negative
 * value (implementation-defined).  Do the extension with unsigned
 * arithmetic instead, which is fully defined for any 1 <= len <= 32
 * and produces identical results on two's-complement hosts.
 */
static int sign_extend(int x, int len)
{
    uint32_t sign = 1u << (len - 1);
    /* Keep only the low LEN bits; (sign + sign - 1) wraps to all-ones
       when len == 32, so that case is handled too.  */
    uint32_t val = (uint32_t)x & (sign + sign - 1);
    /* Classic xor/subtract trick: flip the sign bit, then subtract it.  */
    return (int)((val ^ sign) - sign);
}
132 
133 #define IS_IMM (insn & (1<<13))
134 
/* Allocate a 32-bit TCG temporary and record it in the DisasContext so it
   can be freed en masse at the end of the instruction.  Asserts if the
   fixed-size tracking array (dc->t32) is exhausted.  */
static inline TCGv_i32 get_temp_i32(DisasContext *dc)
{
    TCGv_i32 t;
    assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
    dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
    return t;
}
142 
/* Allocate a target-long TCG temporary tracked in dc->ttl; the counterpart
   of get_temp_i32() for TCGv values.  */
static inline TCGv get_temp_tl(DisasContext *dc)
{
    TCGv t;
    assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
    dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
    return t;
}
150 
/* Mark the FPRS dirty bit (DL for f0-f31, DU for f32-f63) corresponding to
   register RD.  No-op on non-sparc64 targets, which have no FPRS.  */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
163 
164 /* floating point registers moves */
/* floating point registers moves */
/* Return a TCGv_i32 holding single-precision register SRC.  Singles are
   packed two per i64 in cpu_fpr[]: the odd register is the low half, the
   even register the high half.  Depending on the TCG host, the half can
   either be referenced in place or must be extracted into a temporary.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    if (src & 1) {
        /* Low half: reinterpret the i64 register as an i32 directly.  */
        return MAKE_TCGV_I32(GET_TCGV_I64(cpu_fpr[src / 2]));
    } else {
        /* High half: shift it down into a fresh temporary.  */
        TCGv_i32 ret = get_temp_i32(dc);
        TCGv_i64 t = tcg_temp_new_i64();

        tcg_gen_shri_i64(t, cpu_fpr[src / 2], 32);
        tcg_gen_extrl_i64_i32(ret, t);
        tcg_temp_free_i64(t);

        return ret;
    }
#endif
}
188 
/* Store V into single-precision register DST (the matching half of the
   packing described in gen_load_fpr_F) and mark FPRS dirty.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* Deposit into bits [31:0] for odd DST, [63:32] for even DST.  */
    TCGv_i64 t = MAKE_TCGV_I64(GET_TCGV_I32(v));
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
204 
/* Return a scratch i32 in which to build a single-precision result; the
   caller stores it back with gen_store_fpr_F().  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
209 
210 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
211 {
212     src = DFPREG(src);
213     return cpu_fpr[src / 2];
214 }
215 
216 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
217 {
218     dst = DFPREG(dst);
219     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
220     gen_update_fprs_dirty(dc, dst);
221 }
222 
/* Return the i64 destination for double-precision register DST; results
   are generated directly into the register, no temporary involved.  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
227 
/* Copy quad-precision register pair SRC into the env scratch slot qt0,
   upper i64 first, for use by quad helper functions.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
235 
/* Same as gen_op_load_fpr_QT0(), but targeting the second scratch slot qt1
   (the second operand of two-operand quad helpers).  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
243 
/* Copy the quad result left in env scratch slot qt0 back into the
   quad-precision register pair DST.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
251 
/* Store the i64 pair (V1 = upper, V2 = lower) into quad-precision
   register DST and mark FPRS dirty.  */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
261 
262 #ifdef TARGET_SPARC64
263 static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
264 {
265     src = QFPREG(src);
266     return cpu_fpr[src / 2];
267 }
268 
269 static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
270 {
271     src = QFPREG(src);
272     return cpu_fpr[src / 2 + 1];
273 }
274 
/* Copy quad-precision register RS to RD (both i64 halves) and mark FPRS
   dirty for the destination.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
284 #endif
285 
/* moves */
/* Privilege predicates: in user-only emulation the guest is never
   privileged, so both collapse to constant 0.  On sparc64 a hypervisor
   context also counts as supervisor.  */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* AM_CHECK: whether addresses must be truncated to 32 bits.  Always true
   for the 32-bit ABI; otherwise controlled by the PSTATE.AM-derived flag
   captured in the DisasContext.  */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
308 
309 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
310 {
311 #ifdef TARGET_SPARC64
312     if (AM_CHECK(dc))
313         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
314 #endif
315 }
316 
317 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
318 {
319     if (reg > 0) {
320         assert(reg < 32);
321         return cpu_regs[reg];
322     } else {
323         TCGv t = get_temp_tl(dc);
324         tcg_gen_movi_tl(t, 0);
325         return t;
326     }
327 }
328 
329 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
330 {
331     if (reg > 0) {
332         assert(reg < 32);
333         tcg_gen_mov_tl(cpu_regs[reg], v);
334     }
335 }
336 
337 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
338 {
339     if (reg > 0) {
340         assert(reg < 32);
341         return cpu_regs[reg];
342     } else {
343         return get_temp_tl(dc);
344     }
345 }
346 
/* Decide whether a direct (chained) TB jump to PC/NPC is permissible:
   not while single-stepping, and in system mode only when both targets
   lie on the same guest page as the current TB.  */
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
361 
/* Emit a jump to (PC, NPC), using TB chaining via slot TB_NUM when
   use_goto_tb() allows it, otherwise a plain exit to the main loop.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        /* Encode the TB pointer plus chain slot in the exit value.  */
        tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
378 
// XXX suboptimal
/* reg = N (negative) flag of the 32-bit PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
385 
/* reg = Z (zero) flag of the PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
391 
/* reg = V (overflow) flag of the PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
397 
/* reg = C (carry) flag of the PSR image SRC, as 0 or 1.  */
static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
403 
/* dst = src1 + src2, recording operands and result in cpu_cc_* for later
   lazy flag computation.  The caller is responsible for setting cc_op.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
411 
/* Return a fresh i32 temp holding the 32-bit carry out of the previous
   flag-setting add: carry = (dst < src) unsigned.  On 64-bit targets the
   comparison is done on the truncated low halves.  Caller frees it.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
437 
/* Return a fresh i32 temp holding the 32-bit borrow out of the previous
   flag-setting subtract: borrow = (src1 < src2) unsigned.  Caller frees.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
463 
/* Generate dst = src1 + src2 + C (ADDX/ADDC family), optionally also
   updating the condition codes to CC_OP_ADDX.  The way the incoming carry
   is obtained depends on which operation last set the flags (dc->cc_op).  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit carry to target-long width if necessary.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
534 
/* dst = src1 - src2, recording operands and result in cpu_cc_* for later
   lazy flag computation.  The caller is responsible for setting cc_op.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
542 
/* Generate dst = src1 - src2 - C (SUBX/SUBC family), optionally also
   updating the condition codes to CC_OP_SUBX.  Mirror image of
   gen_op_addx_int(): the incoming borrow source depends on dc->cc_op.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

    /* Widen the 32-bit borrow to target-long width if necessary.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
613 
/* Generate one MULScc (multiply step) iteration: conditionally zero the
   second operand on Y bit 0, shift Y and the partial product right by one
   bit (injecting N^V at the top of the product), then add.  Leaves the
   operands and sum in cpu_cc_* for flag computation.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_andi_tl(r_temp, cpu_cc_src, 0x1);
    tcg_gen_shli_tl(r_temp, r_temp, 31);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_or_tl(t0, t0, r_temp);
    tcg_gen_andi_tl(cpu_y, t0, 0xffffffff);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
658 
/* 32x32 -> 64 multiply of the truncated operands, sign- or zero-extended
   per SIGN_EXT.  The low 32 bits (full 64 on 64-bit targets) land in DST;
   the high 32 bits are copied into the Y register.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    /* Extend the low 32 bits of each operand to 64 bits.  */
    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    /* Y receives the high half of the product.  */
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
686 
687 static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
688 {
689     /* zero-extend truncated operands before multiplication */
690     gen_op_multiply(dst, src1, src2, 0);
691 }
692 
693 static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
694 {
695     /* sign-extend truncated operands before multiplication */
696     gen_op_multiply(dst, src1, src2, 1);
697 }
698 
// ba: always -> 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
704 
// be: equal -> Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
710 
711 // Z | (N ^ V)
712 static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
713 {
714     TCGv t0 = tcg_temp_new();
715     gen_mov_reg_N(t0, src);
716     gen_mov_reg_V(dst, src);
717     tcg_gen_xor_tl(dst, dst, t0);
718     gen_mov_reg_Z(t0, src);
719     tcg_gen_or_tl(dst, dst, t0);
720     tcg_temp_free(t0);
721 }
722 
// bl: less -> N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
732 
// bleu: less or equal unsigned -> C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
742 
// bcs: carry set -> C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
748 
// bvs: overflow set -> V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
754 
// bn: never -> 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
760 
// bneg: negative -> N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
766 
// bne: not equal -> !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);   /* invert the 0/1 flag value */
}
773 
// bg: greater -> !(Z | (N ^ V)), computed as the inverse of ble
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
780 
// bge: greater or equal -> !(N ^ V), computed as the inverse of bl
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
787 
// bgu: greater unsigned -> !(C | Z), computed as the inverse of bleu
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
794 
// bcc: carry clear -> !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
801 
// bpos: positive -> !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
808 
// bvc: overflow clear -> !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
815 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* reg = FCC0 of the FCC field at FCC_OFFSET within FSR image SRC (0/1).  */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
829 
/* reg = FCC1 of the FCC field at FCC_OFFSET within FSR image SRC (0/1).  */
static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
836 
// fbne: FCC != 0 -> FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
847 
// fblg: FCC == 1 or 2 -> FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
858 
// fbul: FCC == 1 or 3 -> FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
865 
// fbl: FCC == 1 -> FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
876 
// fbug: FCC == 2 or 3 -> FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
883 
// fbg: FCC == 2 -> !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);   /* note reversed operands: FCC1 & ~FCC0 */
    tcg_temp_free(t0);
}
894 
// fbu: FCC == 3 (unordered) -> FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}
905 
// fbe: FCC == 0 -> !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);   /* invert the 0/1 result */
    tcg_temp_free(t0);
}
917 
// fbue: FCC == 0 or 3 -> !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
929 
// fbge: FCC == 0 or 2 -> !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
937 
// fbuge: FCC != 1 -> !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
949 
// fble: FCC == 0 or 1 -> !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
957 
// fbule: FCC != 2 -> !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);   /* FCC1 & ~FCC0 */
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
969 
// fbo: ordered, FCC != 3 -> !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
981 
/* Emit a two-way TB exit: go to PC1 if R_COND is nonzero, else PC2.
   Each arm chains through its own goto_tb slot.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    /* Condition true: taken target.  */
    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
994 
/* Conditional branch with the annul bit set: if cpu_cond is true, execute
   the delay slot at NPC then jump to PC1; otherwise skip (annul) the delay
   slot and continue at NPC+4.  Ends the TB.  */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}
1009 
/* Conditional branch without the annul bit: the delay slot always runs.
   When NPC is statically known, defer the decision by recording both
   possible next-PCs (JUMP_PC state); otherwise select the new npc at
   runtime with a movcond on cpu_cond.  */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;       /* taken target */
        dc->jump_pc[1] = npc + 4;   /* fall-through target */
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        /* npc = cond != 0 ? pc1 : npc + 4 */
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1034 
/* Resolve a pending JUMP_PC: select cpu_npc from the recorded pair
   jump_pc[0]/jump_pc[1] according to whether cpu_cond is nonzero.  */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1047 
/* call this function before using the condition register as it may
   have been set for a jump */
/* After this, npc is DYNAMIC_PC and cpu_cond is free to be clobbered.  */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1057 
/* Make the architectural cpu_npc match dc->npc: resolve a pending JUMP_PC,
   or store a static npc value.  Nothing to do if npc is already dynamic.  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1067 
/* Force the lazily-evaluated condition codes into the PSR so that the
   flags can be read directly; records CC_OP_FLAGS as the new state.  */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1075 
/* Synchronize the architectural pc/npc with the translator's view, e.g.
   before raising an exception or calling a helper that may long-jump.  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1081 
/* Raise guest exception WHICH at the current pc/npc and end the TB.  */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->is_br = 1;   /* no code after an unconditional exception */
}
1092 
/* Emit a runtime alignment check of addr against the given bit mask;
   the helper raises an exception on a misaligned address. */
static void gen_check_align(TCGv addr, int mask)
{
    TCGv_i32 r_mask = tcg_const_i32(mask);
    gen_helper_check_align(cpu_env, addr, r_mask);
    tcg_temp_free_i32(r_mask);
}
1099 
1100 static inline void gen_mov_pc_npc(DisasContext *dc)
1101 {
1102     if (dc->npc == JUMP_PC) {
1103         gen_generic_branch(dc);
1104         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1105         dc->pc = DYNAMIC_PC;
1106     } else if (dc->npc == DYNAMIC_PC) {
1107         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1108         dc->pc = DYNAMIC_PC;
1109     } else {
1110         dc->pc = dc->npc;
1111     }
1112 }
1113 
/* Emit pc = npc; npc += 4 — the runtime equivalent of stepping to the
   next instruction when both counters live in globals. */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1119 
/* Release the temporaries held by a DisasCompare.  Operands flagged
   g1/g2 are TCG globals (e.g. cpu_cc_src) and must not be freed. */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1129 
/* Build a DisasCompare for integer condition `cond` (the 4-bit SPARC
   cond field) against %icc or %xcc (xcc selects the 64-bit codes).
   Exploits the lazy condition-code state in dc->cc_op: after a subcc
   or logic op the comparison can be expressed directly on the saved
   operands without computing the flags. */
static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
                        DisasContext *dc)
{
    /* After subcc, cond can be tested as a comparison of the original
       operands (cc_src vs cc_src2); -1 marks conditions needing the
       actual flag bits. */
    static int subcc_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,
        TCG_COND_LE,
        TCG_COND_LT,
        TCG_COND_LEU,
        TCG_COND_LTU,
        -1, /* neg */
        -1, /* overflow */
        TCG_COND_ALWAYS,
        TCG_COND_NE,
        TCG_COND_GT,
        TCG_COND_GE,
        TCG_COND_GTU,
        TCG_COND_GEU,
        -1, /* pos */
        -1, /* no overflow */
    };

    /* After a logic op, C and V are known zero, so every condition
       reduces to a comparison of the result (cc_dst) against zero. */
    static int logic_cond[16] = {
        TCG_COND_NEVER,
        TCG_COND_EQ,     /* eq:  Z */
        TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
        TCG_COND_LT,     /* lt:  N ^ V -> N */
        TCG_COND_EQ,     /* leu: C | Z -> Z */
        TCG_COND_NEVER,  /* ltu: C -> 0 */
        TCG_COND_LT,     /* neg: N */
        TCG_COND_NEVER,  /* vs:  V -> 0 */
        TCG_COND_ALWAYS,
        TCG_COND_NE,     /* ne:  !Z */
        TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
        TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
        TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
        TCG_COND_ALWAYS, /* geu: !C -> 1 */
        TCG_COND_GE,     /* pos: !N */
        TCG_COND_ALWAYS, /* vc:  !V -> 1 */
    };

    TCGv_i32 r_src;
    TCGv r_dst;

#ifdef TARGET_SPARC64
    if (xcc) {
        r_src = cpu_xcc;
    } else {
        r_src = cpu_psr;
    }
#else
    r_src = cpu_psr;
#endif

    switch (dc->cc_op) {
    case CC_OP_LOGIC:
        cmp->cond = logic_cond[cond];
    do_compare_dst_0:
        /* Compare cc_dst against zero with the chosen condition. */
        cmp->is_bool = false;
        cmp->g2 = false;
        cmp->c2 = tcg_const_tl(0);
#ifdef TARGET_SPARC64
        if (!xcc) {
            /* For %icc only the low 32 bits are significant. */
            cmp->g1 = false;
            cmp->c1 = tcg_temp_new();
            tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
            break;
        }
#endif
        cmp->g1 = true;
        cmp->c1 = cpu_cc_dst;
        break;

    case CC_OP_SUB:
        switch (cond) {
        case 6:  /* neg */
        case 14: /* pos */
            /* N of (a - b) equals the sign of the result in cc_dst. */
            cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
            goto do_compare_dst_0;

        case 7: /* overflow */
        case 15: /* !overflow */
            /* V cannot be recovered from the operands alone. */
            goto do_dynamic;

        default:
            cmp->cond = subcc_cond[cond];
            cmp->is_bool = false;
#ifdef TARGET_SPARC64
            if (!xcc) {
                /* Note that sign-extension works for unsigned compares as
                   long as both operands are sign-extended.  */
                cmp->g1 = cmp->g2 = false;
                cmp->c1 = tcg_temp_new();
                cmp->c2 = tcg_temp_new();
                tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
                tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
                break;
            }
#endif
            cmp->g1 = cmp->g2 = true;
            cmp->c1 = cpu_cc_src;
            cmp->c2 = cpu_cc_src2;
            break;
        }
        break;

    default:
    do_dynamic:
        /* Fall back to computing the flags, then evaluate below. */
        gen_helper_compute_psr(cpu_env);
        dc->cc_op = CC_OP_FLAGS;
        /* FALLTHRU */

    case CC_OP_FLAGS:
        /* We're going to generate a boolean result.  */
        cmp->cond = TCG_COND_NE;
        cmp->is_bool = true;
        cmp->g1 = cmp->g2 = false;
        cmp->c1 = r_dst = tcg_temp_new();
        cmp->c2 = tcg_const_tl(0);

        switch (cond) {
        case 0x0:
            gen_op_eval_bn(r_dst);
            break;
        case 0x1:
            gen_op_eval_be(r_dst, r_src);
            break;
        case 0x2:
            gen_op_eval_ble(r_dst, r_src);
            break;
        case 0x3:
            gen_op_eval_bl(r_dst, r_src);
            break;
        case 0x4:
            gen_op_eval_bleu(r_dst, r_src);
            break;
        case 0x5:
            gen_op_eval_bcs(r_dst, r_src);
            break;
        case 0x6:
            gen_op_eval_bneg(r_dst, r_src);
            break;
        case 0x7:
            gen_op_eval_bvs(r_dst, r_src);
            break;
        case 0x8:
            gen_op_eval_ba(r_dst);
            break;
        case 0x9:
            gen_op_eval_bne(r_dst, r_src);
            break;
        case 0xa:
            gen_op_eval_bg(r_dst, r_src);
            break;
        case 0xb:
            gen_op_eval_bge(r_dst, r_src);
            break;
        case 0xc:
            gen_op_eval_bgu(r_dst, r_src);
            break;
        case 0xd:
            gen_op_eval_bcc(r_dst, r_src);
            break;
        case 0xe:
            gen_op_eval_bpos(r_dst, r_src);
            break;
        case 0xf:
            gen_op_eval_bvc(r_dst, r_src);
            break;
        }
        break;
    }
}
1303 
/* Build a DisasCompare for FP condition `cond` on condition-code set
   `cc` (fcc0..fcc3); the result is always a boolean temp compared
   NE 0. */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    /* Bit offset of the selected fcc field in the FSR, relative to
       fcc0; the eval helpers shift by this amount. */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1383 
1384 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1385                      DisasContext *dc)
1386 {
1387     DisasCompare cmp;
1388     gen_compare(&cmp, cc, cond, dc);
1389 
1390     /* The interface is to return a boolean in r_dst.  */
1391     if (cmp.is_bool) {
1392         tcg_gen_mov_tl(r_dst, cmp.c1);
1393     } else {
1394         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1395     }
1396 
1397     free_compare(&cmp);
1398 }
1399 
1400 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1401 {
1402     DisasCompare cmp;
1403     gen_fcompare(&cmp, cc, cond);
1404 
1405     /* The interface is to return a boolean in r_dst.  */
1406     if (cmp.is_bool) {
1407         tcg_gen_mov_tl(r_dst, cmp.c1);
1408     } else {
1409         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1410     }
1411 
1412     free_compare(&cmp);
1413 }
1414 
1415 #ifdef TARGET_SPARC64
/* Map the 3-bit register-condition field (BPr/MOVr/FMOVr) to a TCG
   condition.  The table holds the INVERTED condition; users apply
   tcg_invert_cond.  -1 marks the two reserved encodings. */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1427 
/* Build a DisasCompare testing register r_src against zero per the
   register-condition field `cond`.  r_src is borrowed (g1 set), so it
   is not freed by free_compare. */
static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
{
    /* Un-invert the table entry to get the actual condition. */
    cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
    cmp->is_bool = false;
    cmp->g1 = true;
    cmp->g2 = false;
    cmp->c1 = r_src;
    cmp->c2 = tcg_const_tl(0);
}
1437 
/* Evaluate register-condition `cond` on r_src and leave a boolean
   (0/1) in r_dst. */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1448 #endif
1449 
1450 static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1451 {
1452     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1453     target_ulong target = dc->pc + offset;
1454 
1455 #ifdef TARGET_SPARC64
1456     if (unlikely(AM_CHECK(dc))) {
1457         target &= 0xffffffffULL;
1458     }
1459 #endif
1460     if (cond == 0x0) {
1461         /* unconditional not taken */
1462         if (a) {
1463             dc->pc = dc->npc + 4;
1464             dc->npc = dc->pc + 4;
1465         } else {
1466             dc->pc = dc->npc;
1467             dc->npc = dc->pc + 4;
1468         }
1469     } else if (cond == 0x8) {
1470         /* unconditional taken */
1471         if (a) {
1472             dc->pc = target;
1473             dc->npc = dc->pc + 4;
1474         } else {
1475             dc->pc = dc->npc;
1476             dc->npc = target;
1477             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1478         }
1479     } else {
1480         flush_cond(dc);
1481         gen_cond(cpu_cond, cc, cond, dc);
1482         if (a) {
1483             gen_branch_a(dc, target);
1484         } else {
1485             gen_branch_n(dc, target);
1486         }
1487     }
1488 }
1489 
1490 static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
1491 {
1492     unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
1493     target_ulong target = dc->pc + offset;
1494 
1495 #ifdef TARGET_SPARC64
1496     if (unlikely(AM_CHECK(dc))) {
1497         target &= 0xffffffffULL;
1498     }
1499 #endif
1500     if (cond == 0x0) {
1501         /* unconditional not taken */
1502         if (a) {
1503             dc->pc = dc->npc + 4;
1504             dc->npc = dc->pc + 4;
1505         } else {
1506             dc->pc = dc->npc;
1507             dc->npc = dc->pc + 4;
1508         }
1509     } else if (cond == 0x8) {
1510         /* unconditional taken */
1511         if (a) {
1512             dc->pc = target;
1513             dc->npc = dc->pc + 4;
1514         } else {
1515             dc->pc = dc->npc;
1516             dc->npc = target;
1517             tcg_gen_mov_tl(cpu_pc, cpu_npc);
1518         }
1519     } else {
1520         flush_cond(dc);
1521         gen_fcond(cpu_cond, cc, cond);
1522         if (a) {
1523             gen_branch_a(dc, target);
1524         } else {
1525             gen_branch_n(dc, target);
1526         }
1527     }
1528 }
1529 
1530 #ifdef TARGET_SPARC64
/* Translate a branch-on-register-condition (BPr).  There are no
   unconditional encodings here, so the condition is always evaluated
   at runtime. */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect. */
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1548 
/* Emit single-precision FP compare writing fcc[fccno] (v9 has four
   FP condition-code sets, hence the helper-per-set dispatch). */
static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1566 
/* Emit double-precision FP compare writing fcc[fccno]. */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1584 
/* Emit quad-precision FP compare writing fcc[fccno]; operands are
   taken from the QT0/QT1 staging slots in env. */
static inline void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1602 
/* Emit single-precision FP compare-with-exception (FCMPEs) writing
   fcc[fccno]. */
static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1620 
/* Emit double-precision FP compare-with-exception (FCMPEd) writing
   fcc[fccno]. */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
        break;
    }
}
1638 
/* Emit quad-precision FP compare-with-exception (FCMPEq) writing
   fcc[fccno]; operands come from the QT0/QT1 staging slots. */
static inline void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, cpu_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
        break;
    }
}
1656 
1657 #else
1658 
/* Pre-v9 variant: only fcc0 exists, so fccno is ignored. */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1663 
/* Pre-v9 variant: only fcc0 exists, so fccno is ignored. */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1668 
/* Pre-v9 variant: only fcc0 exists, so fccno is ignored. */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1673 
/* Pre-v9 variant: only fcc0 exists, so fccno is ignored. */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1678 
/* Pre-v9 variant: only fcc0 exists, so fccno is ignored. */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1683 
/* Pre-v9 variant: only fcc0 exists, so fccno is ignored. */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1688 #endif
1689 
/* Set the FSR trap-type field to fsr_flags (clearing the old FTT
   bits) and raise an FP exception trap. */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1696 
/* If the FPU is disabled (system emulation only), raise the
   fp-disabled trap and return 1 so the caller aborts translation of
   the insn; otherwise return 0. */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1707 
/* Clear the FSR trap-type and current-exception fields before an FP
   operation. */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1712 
1713 static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
1714                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1715 {
1716     TCGv_i32 dst, src;
1717 
1718     src = gen_load_fpr_F(dc, rs);
1719     dst = gen_dest_fpr_F(dc);
1720 
1721     gen(dst, cpu_env, src);
1722     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1723 
1724     gen_store_fpr_F(dc, rd, dst);
1725 }
1726 
1727 static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1728                                  void (*gen)(TCGv_i32, TCGv_i32))
1729 {
1730     TCGv_i32 dst, src;
1731 
1732     src = gen_load_fpr_F(dc, rs);
1733     dst = gen_dest_fpr_F(dc);
1734 
1735     gen(dst, src);
1736 
1737     gen_store_fpr_F(dc, rd, dst);
1738 }
1739 
1740 static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1741                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1742 {
1743     TCGv_i32 dst, src1, src2;
1744 
1745     src1 = gen_load_fpr_F(dc, rs1);
1746     src2 = gen_load_fpr_F(dc, rs2);
1747     dst = gen_dest_fpr_F(dc);
1748 
1749     gen(dst, cpu_env, src1, src2);
1750     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1751 
1752     gen_store_fpr_F(dc, rd, dst);
1753 }
1754 
1755 #ifdef TARGET_SPARC64
1756 static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1757                                   void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1758 {
1759     TCGv_i32 dst, src1, src2;
1760 
1761     src1 = gen_load_fpr_F(dc, rs1);
1762     src2 = gen_load_fpr_F(dc, rs2);
1763     dst = gen_dest_fpr_F(dc);
1764 
1765     gen(dst, src1, src2);
1766 
1767     gen_store_fpr_F(dc, rd, dst);
1768 }
1769 #endif
1770 
1771 static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
1772                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1773 {
1774     TCGv_i64 dst, src;
1775 
1776     src = gen_load_fpr_D(dc, rs);
1777     dst = gen_dest_fpr_D(dc, rd);
1778 
1779     gen(dst, cpu_env, src);
1780     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1781 
1782     gen_store_fpr_D(dc, rd, dst);
1783 }
1784 
1785 #ifdef TARGET_SPARC64
1786 static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1787                                  void (*gen)(TCGv_i64, TCGv_i64))
1788 {
1789     TCGv_i64 dst, src;
1790 
1791     src = gen_load_fpr_D(dc, rs);
1792     dst = gen_dest_fpr_D(dc, rd);
1793 
1794     gen(dst, src);
1795 
1796     gen_store_fpr_D(dc, rd, dst);
1797 }
1798 #endif
1799 
1800 static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1801                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1802 {
1803     TCGv_i64 dst, src1, src2;
1804 
1805     src1 = gen_load_fpr_D(dc, rs1);
1806     src2 = gen_load_fpr_D(dc, rs2);
1807     dst = gen_dest_fpr_D(dc, rd);
1808 
1809     gen(dst, cpu_env, src1, src2);
1810     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1811 
1812     gen_store_fpr_D(dc, rd, dst);
1813 }
1814 
1815 #ifdef TARGET_SPARC64
1816 static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1817                                   void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1818 {
1819     TCGv_i64 dst, src1, src2;
1820 
1821     src1 = gen_load_fpr_D(dc, rs1);
1822     src2 = gen_load_fpr_D(dc, rs2);
1823     dst = gen_dest_fpr_D(dc, rd);
1824 
1825     gen(dst, src1, src2);
1826 
1827     gen_store_fpr_D(dc, rd, dst);
1828 }
1829 
1830 static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1831                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1832 {
1833     TCGv_i64 dst, src1, src2;
1834 
1835     src1 = gen_load_fpr_D(dc, rs1);
1836     src2 = gen_load_fpr_D(dc, rs2);
1837     dst = gen_dest_fpr_D(dc, rd);
1838 
1839     gen(dst, cpu_gsr, src1, src2);
1840 
1841     gen_store_fpr_D(dc, rd, dst);
1842 }
1843 
1844 static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1845                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1846 {
1847     TCGv_i64 dst, src0, src1, src2;
1848 
1849     src1 = gen_load_fpr_D(dc, rs1);
1850     src2 = gen_load_fpr_D(dc, rs2);
1851     src0 = gen_load_fpr_D(dc, rd);
1852     dst = gen_dest_fpr_D(dc, rd);
1853 
1854     gen(dst, src0, src1, src2);
1855 
1856     gen_store_fpr_D(dc, rd, dst);
1857 }
1858 #endif
1859 
/* Quad-precision unary op: source staged in QT1, result read back
   from QT0, followed by the IEEE exception check. */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1871 
1872 #ifdef TARGET_SPARC64
/* Quad-precision unary op with no IEEE exception check; source in
   QT1, result in QT0. */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1883 #endif
1884 
/* Quad-precision binary op: sources staged in QT0/QT1, result read
   back from QT0, followed by the IEEE exception check. */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1897 
1898 static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1899                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1900 {
1901     TCGv_i64 dst;
1902     TCGv_i32 src1, src2;
1903 
1904     src1 = gen_load_fpr_F(dc, rs1);
1905     src2 = gen_load_fpr_F(dc, rs2);
1906     dst = gen_dest_fpr_D(dc, rd);
1907 
1908     gen(dst, cpu_env, src1, src2);
1909     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1910 
1911     gen_store_fpr_D(dc, rd, dst);
1912 }
1913 
1914 static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
1915                                void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
1916 {
1917     TCGv_i64 src1, src2;
1918 
1919     src1 = gen_load_fpr_D(dc, rs1);
1920     src2 = gen_load_fpr_D(dc, rs2);
1921 
1922     gen(cpu_env, src1, src2);
1923     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1924 
1925     gen_op_store_QT0_fpr(QFPREG(rd));
1926     gen_update_fprs_dirty(dc, QFPREG(rd));
1927 }
1928 
1929 #ifdef TARGET_SPARC64
1930 static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
1931                               void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1932 {
1933     TCGv_i64 dst;
1934     TCGv_i32 src;
1935 
1936     src = gen_load_fpr_F(dc, rs);
1937     dst = gen_dest_fpr_D(dc, rd);
1938 
1939     gen(dst, cpu_env, src);
1940     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1941 
1942     gen_store_fpr_D(dc, rd, dst);
1943 }
1944 #endif
1945 
1946 static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1947                                  void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1948 {
1949     TCGv_i64 dst;
1950     TCGv_i32 src;
1951 
1952     src = gen_load_fpr_F(dc, rs);
1953     dst = gen_dest_fpr_D(dc, rd);
1954 
1955     gen(dst, cpu_env, src);
1956 
1957     gen_store_fpr_D(dc, rd, dst);
1958 }
1959 
1960 static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
1961                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1962 {
1963     TCGv_i32 dst;
1964     TCGv_i64 src;
1965 
1966     src = gen_load_fpr_D(dc, rs);
1967     dst = gen_dest_fpr_F(dc);
1968 
1969     gen(dst, cpu_env, src);
1970     gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);
1971 
1972     gen_store_fpr_F(dc, rd, dst);
1973 }
1974 
/* frd(single) = gen(quad source staged in QT1), followed by the IEEE
   exception check. */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1988 
/* frd(double) = gen(quad source staged in QT1), followed by the IEEE
   exception check. */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
2002 
2003 static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
2004                                  void (*gen)(TCGv_ptr, TCGv_i32))
2005 {
2006     TCGv_i32 src;
2007 
2008     src = gen_load_fpr_F(dc, rs);
2009 
2010     gen(cpu_env, src);
2011 
2012     gen_op_store_QT0_fpr(QFPREG(rd));
2013     gen_update_fprs_dirty(dc, QFPREG(rd));
2014 }
2015 
2016 static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
2017                                  void (*gen)(TCGv_ptr, TCGv_i64))
2018 {
2019     TCGv_i64 src;
2020 
2021     src = gen_load_fpr_D(dc, rs);
2022 
2023     gen(cpu_env, src);
2024 
2025     gen_op_store_QT0_fpr(QFPREG(rd));
2026     gen_update_fprs_dirty(dc, QFPREG(rd));
2027 }
2028 
/* SWAP: atomically exchange src with the memory word at addr, leaving
   the old memory value in dst. */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2035 
/* LDSTUB: atomically load the byte at addr into dst and store 0xff
   there, implemented as an atomic exchange with the constant 0xff. */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
    tcg_temp_free(m1);
}
2043 
2044 /* asi moves */
2045 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI, chosen by get_asi(): how the access
   should be emitted. */
typedef enum {
    GET_ASI_HELPER,  /* generic slow path via helper */
    GET_ASI_EXCP,    /* an exception has already been raised */
    GET_ASI_DIRECT,  /* plain load/store with a chosen mem_idx */
    GET_ASI_DTWINX,  /* 128-bit twin load/store */
    GET_ASI_BLOCK,   /* 64-byte block transfer */
    GET_ASI_SHORT,   /* 8/16-bit FP load/store */
    GET_ASI_BCOPY,   /* LEON/sparc32 block copy (sta) */
    GET_ASI_BFILL,   /* LEON/sparc32 block fill (stda) */
} ASIType;
2056 
/* Result of ASI decoding: how and where to perform the access. */
typedef struct {
    ASIType type;   /* emission strategy, see ASIType */
    int asi;        /* resolved ASI number */
    int mem_idx;    /* MMU index to use for the access */
    TCGMemOp memop; /* size/sign/endianness of the access */
} DisasASI;
2063 
2064 static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
2065 {
2066     int asi = GET_FIELD(insn, 19, 26);
2067     ASIType type = GET_ASI_HELPER;
2068     int mem_idx = dc->mem_idx;
2069 
2070 #ifndef TARGET_SPARC64
2071     /* Before v9, all asis are immediate and privileged.  */
2072     if (IS_IMM) {
2073         gen_exception(dc, TT_ILL_INSN);
2074         type = GET_ASI_EXCP;
2075     } else if (supervisor(dc)
2076                /* Note that LEON accepts ASI_USERDATA in user mode, for
2077                   use with CASA.  Also note that previous versions of
2078                   QEMU allowed (and old versions of gcc emitted) ASI_P
2079                   for LEON, which is incorrect.  */
2080                || (asi == ASI_USERDATA
2081                    && (dc->def->features & CPU_FEATURE_CASA))) {
2082         switch (asi) {
2083         case ASI_USERDATA:   /* User data access */
2084             mem_idx = MMU_USER_IDX;
2085             type = GET_ASI_DIRECT;
2086             break;
2087         case ASI_KERNELDATA: /* Supervisor data access */
2088             mem_idx = MMU_KERNEL_IDX;
2089             type = GET_ASI_DIRECT;
2090             break;
2091         case ASI_M_BYPASS:    /* MMU passthrough */
2092         case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2093             mem_idx = MMU_PHYS_IDX;
2094             type = GET_ASI_DIRECT;
2095             break;
2096         case ASI_M_BCOPY: /* Block copy, sta access */
2097             mem_idx = MMU_KERNEL_IDX;
2098             type = GET_ASI_BCOPY;
2099             break;
2100         case ASI_M_BFILL: /* Block fill, stda access */
2101             mem_idx = MMU_KERNEL_IDX;
2102             type = GET_ASI_BFILL;
2103             break;
2104         }
2105     } else {
2106         gen_exception(dc, TT_PRIV_INSN);
2107         type = GET_ASI_EXCP;
2108     }
2109 #else
2110     if (IS_IMM) {
2111         asi = dc->asi;
2112     }
2113     /* With v9, all asis below 0x80 are privileged.  */
2114     /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2115        down that bit into DisasContext.  For the moment that's ok,
2116        since the direct implementations below doesn't have any ASIs
2117        in the restricted [0x30, 0x7f] range, and the check will be
2118        done properly in the helper.  */
2119     if (!supervisor(dc) && asi < 0x80) {
2120         gen_exception(dc, TT_PRIV_ACT);
2121         type = GET_ASI_EXCP;
2122     } else {
2123         switch (asi) {
2124         case ASI_REAL:      /* Bypass */
2125         case ASI_REAL_IO:   /* Bypass, non-cacheable */
2126         case ASI_REAL_L:    /* Bypass LE */
2127         case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2128         case ASI_TWINX_REAL:   /* Real address, twinx */
2129         case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2130         case ASI_QUAD_LDD_PHYS:
2131         case ASI_QUAD_LDD_PHYS_L:
2132             mem_idx = MMU_PHYS_IDX;
2133             break;
2134         case ASI_N:  /* Nucleus */
2135         case ASI_NL: /* Nucleus LE */
2136         case ASI_TWINX_N:
2137         case ASI_TWINX_NL:
2138         case ASI_NUCLEUS_QUAD_LDD:
2139         case ASI_NUCLEUS_QUAD_LDD_L:
2140             if (hypervisor(dc)) {
2141                 mem_idx = MMU_PHYS_IDX;
2142             } else {
2143                 mem_idx = MMU_NUCLEUS_IDX;
2144             }
2145             break;
2146         case ASI_AIUP:  /* As if user primary */
2147         case ASI_AIUPL: /* As if user primary LE */
2148         case ASI_TWINX_AIUP:
2149         case ASI_TWINX_AIUP_L:
2150         case ASI_BLK_AIUP_4V:
2151         case ASI_BLK_AIUP_L_4V:
2152         case ASI_BLK_AIUP:
2153         case ASI_BLK_AIUPL:
2154             mem_idx = MMU_USER_IDX;
2155             break;
2156         case ASI_AIUS:  /* As if user secondary */
2157         case ASI_AIUSL: /* As if user secondary LE */
2158         case ASI_TWINX_AIUS:
2159         case ASI_TWINX_AIUS_L:
2160         case ASI_BLK_AIUS_4V:
2161         case ASI_BLK_AIUS_L_4V:
2162         case ASI_BLK_AIUS:
2163         case ASI_BLK_AIUSL:
2164             mem_idx = MMU_USER_SECONDARY_IDX;
2165             break;
2166         case ASI_S:  /* Secondary */
2167         case ASI_SL: /* Secondary LE */
2168         case ASI_TWINX_S:
2169         case ASI_TWINX_SL:
2170         case ASI_BLK_COMMIT_S:
2171         case ASI_BLK_S:
2172         case ASI_BLK_SL:
2173         case ASI_FL8_S:
2174         case ASI_FL8_SL:
2175         case ASI_FL16_S:
2176         case ASI_FL16_SL:
2177             if (mem_idx == MMU_USER_IDX) {
2178                 mem_idx = MMU_USER_SECONDARY_IDX;
2179             } else if (mem_idx == MMU_KERNEL_IDX) {
2180                 mem_idx = MMU_KERNEL_SECONDARY_IDX;
2181             }
2182             break;
2183         case ASI_P:  /* Primary */
2184         case ASI_PL: /* Primary LE */
2185         case ASI_TWINX_P:
2186         case ASI_TWINX_PL:
2187         case ASI_BLK_COMMIT_P:
2188         case ASI_BLK_P:
2189         case ASI_BLK_PL:
2190         case ASI_FL8_P:
2191         case ASI_FL8_PL:
2192         case ASI_FL16_P:
2193         case ASI_FL16_PL:
2194             break;
2195         }
2196         switch (asi) {
2197         case ASI_REAL:
2198         case ASI_REAL_IO:
2199         case ASI_REAL_L:
2200         case ASI_REAL_IO_L:
2201         case ASI_N:
2202         case ASI_NL:
2203         case ASI_AIUP:
2204         case ASI_AIUPL:
2205         case ASI_AIUS:
2206         case ASI_AIUSL:
2207         case ASI_S:
2208         case ASI_SL:
2209         case ASI_P:
2210         case ASI_PL:
2211             type = GET_ASI_DIRECT;
2212             break;
2213         case ASI_TWINX_REAL:
2214         case ASI_TWINX_REAL_L:
2215         case ASI_TWINX_N:
2216         case ASI_TWINX_NL:
2217         case ASI_TWINX_AIUP:
2218         case ASI_TWINX_AIUP_L:
2219         case ASI_TWINX_AIUS:
2220         case ASI_TWINX_AIUS_L:
2221         case ASI_TWINX_P:
2222         case ASI_TWINX_PL:
2223         case ASI_TWINX_S:
2224         case ASI_TWINX_SL:
2225         case ASI_QUAD_LDD_PHYS:
2226         case ASI_QUAD_LDD_PHYS_L:
2227         case ASI_NUCLEUS_QUAD_LDD:
2228         case ASI_NUCLEUS_QUAD_LDD_L:
2229             type = GET_ASI_DTWINX;
2230             break;
2231         case ASI_BLK_COMMIT_P:
2232         case ASI_BLK_COMMIT_S:
2233         case ASI_BLK_AIUP_4V:
2234         case ASI_BLK_AIUP_L_4V:
2235         case ASI_BLK_AIUP:
2236         case ASI_BLK_AIUPL:
2237         case ASI_BLK_AIUS_4V:
2238         case ASI_BLK_AIUS_L_4V:
2239         case ASI_BLK_AIUS:
2240         case ASI_BLK_AIUSL:
2241         case ASI_BLK_S:
2242         case ASI_BLK_SL:
2243         case ASI_BLK_P:
2244         case ASI_BLK_PL:
2245             type = GET_ASI_BLOCK;
2246             break;
2247         case ASI_FL8_S:
2248         case ASI_FL8_SL:
2249         case ASI_FL8_P:
2250         case ASI_FL8_PL:
2251             memop = MO_UB;
2252             type = GET_ASI_SHORT;
2253             break;
2254         case ASI_FL16_S:
2255         case ASI_FL16_SL:
2256         case ASI_FL16_P:
2257         case ASI_FL16_PL:
2258             memop = MO_TEUW;
2259             type = GET_ASI_SHORT;
2260             break;
2261         }
2262         /* The little-endian asis all have bit 3 set.  */
2263         if (asi & 8) {
2264             memop ^= MO_BSWAP;
2265         }
2266     }
2267 #endif
2268 
2269     return (DisasASI){ type, asi, mem_idx, memop };
2270 }
2271 
/* Emit code for a single integer load via an alternate address space.
 * DST receives the loaded value, ADDR is the guest virtual address,
 * INSN is the raw instruction word (used by get_asi to decode the ASI),
 * and MEMOP gives the access size and endianness.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* Fast path: a plain qemu load with the resolved mmu index.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Slow path: defer to the ld_asi helper at runtime.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper always produces a 64-bit result; narrow it
                   to the 32-bit target register width.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2309 
/* Emit code for a single integer store via an alternate address space.
 * SRC holds the value to store, ADDR the guest virtual address, INSN the
 * raw instruction word (used by get_asi to decode the ASI), and MEMOP
 * the access size and endianness.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
        /* fall through */
#endif
    case GET_ASI_DIRECT:
        /* Fast path: a plain qemu store with the resolved mmu index.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            /* Round both addresses down to 4-byte boundaries, then copy
               eight 32-bit words.  The loop is fully unrolled at
               translation time.  */
            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        /* Slow path: defer to the st_asi helper at runtime.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen on 32-bit sparc.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2393 
2394 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2395                          TCGv addr, int insn)
2396 {
2397     DisasASI da = get_asi(dc, insn, MO_TEUL);
2398 
2399     switch (da.type) {
2400     case GET_ASI_EXCP:
2401         break;
2402     case GET_ASI_DIRECT:
2403         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2404         break;
2405     default:
2406         /* ??? Should be DAE_invalid_asi.  */
2407         gen_exception(dc, TT_DATA_ACCESS);
2408         break;
2409     }
2410 }
2411 
2412 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2413                         int insn, int rd)
2414 {
2415     DisasASI da = get_asi(dc, insn, MO_TEUL);
2416     TCGv oldv;
2417 
2418     switch (da.type) {
2419     case GET_ASI_EXCP:
2420         return;
2421     case GET_ASI_DIRECT:
2422         oldv = tcg_temp_new();
2423         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2424                                   da.mem_idx, da.memop);
2425         gen_store_gpr(dc, rd, oldv);
2426         tcg_temp_free(oldv);
2427         break;
2428     default:
2429         /* ??? Should be DAE_invalid_asi.  */
2430         gen_exception(dc, TT_DATA_ACCESS);
2431         break;
2432     }
2433 }
2434 
/* Emit code for LDSTUBA: atomically load the byte at ADDR (through the
 * ASI encoded in INSN) into DST and store 0xff back to the same
 * location.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (parallel_cpus) {
            /* The helper-based load+store below is not atomic; in a
               parallel context bail out and retry under the exclusive
               (serialized) execution mode.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            /* Single-threaded: emulate with a non-atomic helper load
               followed by a helper store of 0xff.  */
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2474 #endif
2475 
2476 #ifdef TARGET_SPARC64
/* Emit code for a floating-point load via an alternate address space
 * (ldfa/lddfa/ldqfa).  SIZE is the access size in bytes (4, 8 or 16)
 * and RD is the fp register number from the instruction.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Quad load as two 8-byte accesses.  The first half goes
               through a temp, presumably so that cpu_fpr[rd/2] is only
               written once both accesses have been issued -- TODO confirm
               the fault-ordering rationale.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* Unrolled 64-byte block load: eight consecutive fp
               doubleword registers.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2590 
/* Emit code for a floating-point store via an alternate address space
 * (stfa/stdfa/stqfa).  SIZE is the access size in bytes (4, 8 or 16)
 * and RD is the fp register number from the instruction.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* Unrolled 64-byte block store: eight consecutive fp
               doubleword registers.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2673 
/* Emit code for LDDA on sparc64: load a doubleword (or a twin
 * doubleword for the TWINX asis) into the register pair RD/RD+1.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        return;

    case GET_ASI_DTWINX:
        /* Twin 8-byte loads; only the first access enforces the
           16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    /* Write back both halves of the pair.  */
    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2739 
/* Emit code for STDA on sparc64: store the register pair RD/RD+1
 * (HI already loaded by the caller) as a doubleword, or as a twin
 * doubleword for the TWINX asis.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        break;

    case GET_ASI_DTWINX:
        /* Twin 8-byte stores; only the first access enforces the
           16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2799 
2800 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2801                          int insn, int rd)
2802 {
2803     DisasASI da = get_asi(dc, insn, MO_TEQ);
2804     TCGv oldv;
2805 
2806     switch (da.type) {
2807     case GET_ASI_EXCP:
2808         return;
2809     case GET_ASI_DIRECT:
2810         oldv = tcg_temp_new();
2811         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2812                                   da.mem_idx, da.memop);
2813         gen_store_gpr(dc, rd, oldv);
2814         tcg_temp_free(oldv);
2815         break;
2816     default:
2817         /* ??? Should be DAE_invalid_asi.  */
2818         gen_exception(dc, TT_DATA_ACCESS);
2819         break;
2820     }
2821 }
2822 
2823 #elif !defined(CONFIG_USER_ONLY)
/* Emit code for LDDA on 32-bit sparc: load a 64-bit value and split it
 * into the even/odd register pair RD/RD+1 (hi in RD, lo in RD+1).  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; free the temp and
           skip the register writeback.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Slow path: defer to the ld_asi helper at runtime.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* Split the 64-bit result into the register pair and write back.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2861 
/* Emit code for STDA on 32-bit sparc: store the register pair RD/RD+1
 * (HI already loaded by the caller) as one 64-bit value.  Also handles
 * the sparc32 block-fill ASI.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Combine the pair into one 64-bit value up front; every case
       below stores T64.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already emitted the exception; generate nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            /* Round down to an 8-byte boundary and emit four unrolled
               doubleword stores.  */
            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        /* Slow path: defer to the st_asi helper at runtime.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2914 #endif
2915 
2916 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2917 {
2918     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2919     return gen_load_gpr(dc, rs1);
2920 }
2921 
2922 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2923 {
2924     if (IS_IMM) { /* immediate */
2925         target_long simm = GET_FIELDs(insn, 19, 31);
2926         TCGv t = get_temp_tl(dc);
2927         tcg_gen_movi_tl(t, simm);
2928         return t;
2929     } else {      /* register */
2930         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2931         return gen_load_gpr(dc, rs2);
2932     }
2933 }
2934 
2935 #ifdef TARGET_SPARC64
/* Conditional move of a single-precision fp register: if CMP holds,
 * copy fp reg RS into RD, otherwise leave RD unchanged.  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* The comparison already produced a 0/1 value; just narrow it.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Evaluate the comparison at 64 bits, then narrow the boolean.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
        tcg_temp_free_i64(c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_const_i32(0);

    /* dst = (c32 != 0) ? s1 : s2 -- i.e. keep the old value of RD when
       the condition is false.  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    tcg_temp_free_i32(c32);
    tcg_temp_free_i32(zero);
    gen_store_fpr_F(dc, rd, dst);
}
2964 
2965 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2966 {
2967     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2968     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2969                         gen_load_fpr_D(dc, rs),
2970                         gen_load_fpr_D(dc, rd));
2971     gen_store_fpr_D(dc, rd, dst);
2972 }
2973 
/* Conditional move of a quad-precision fp register: if CMP holds, copy
 * fp quad RS into RD, otherwise leave RD unchanged.  A quad occupies
 * two consecutive 64-bit halves of cpu_fpr, moved with two movconds.  */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    /* The destination quad was (possibly) written; update FPRS dirty bits. */
    gen_update_fprs_dirty(dc, qd);
}
2986 
2987 #ifndef CONFIG_USER_ONLY
/* Compute a pointer to the trap_state entry for the current trap level:
 * r_tsptr = &env->ts[env->tl & MAXTL_MASK].  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
3012 #endif
3013 
/* Generate code for the VIS EDGE8/16/32 (and "little") instructions:
   compute the byte/halfword/word edge mask for a partial store spanning
   s1..s2, optionally setting the integer condition codes as a subcc.
   WIDTH selects the element size, CC requests condition-code update,
   LEFT selects the big-endian ("left") table variant.  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* EDGEcc also performs subcc(s1, s2) for the condition codes.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        /* 8 entries of 8 mask bits each.  */
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        /* 4 entries of 4 mask bits each; bit 0 of the address is
           ignored, hence imask = 6 with a shift of 1.  */
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        /* 2 entries of 2 mask bits each; bits 0-1 of the address are
           ignored, hence imask = 4 with no shift.  */
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = per-table mask values looked up from the low address bits.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the 8-byte-aligned (32-bit masked if AM) addresses.
       NOTE(review): the andi below writes through the caller-provided
       s1/s2 TCGvs; confirm callers pass scratch temporaries rather than
       the canonical cpu_regs[] globals, or this clobbers the guest
       registers.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    /* t1 (no longer needed as the TABL constant) is reused as scratch.  */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3116 
3117 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3118 {
3119     TCGv tmp = tcg_temp_new();
3120 
3121     tcg_gen_add_tl(tmp, s1, s2);
3122     tcg_gen_andi_tl(dst, tmp, -8);
3123     if (left) {
3124         tcg_gen_neg_tl(tmp, tmp);
3125     }
3126     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3127 
3128     tcg_temp_free(tmp);
3129 }
3130 
3131 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3132 {
3133     TCGv t1, t2, shift;
3134 
3135     t1 = tcg_temp_new();
3136     t2 = tcg_temp_new();
3137     shift = tcg_temp_new();
3138 
3139     tcg_gen_andi_tl(shift, gsr, 7);
3140     tcg_gen_shli_tl(shift, shift, 3);
3141     tcg_gen_shl_tl(t1, s1, shift);
3142 
3143     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3144        shift of (up to 63) followed by a constant shift of 1.  */
3145     tcg_gen_xori_tl(shift, shift, 63);
3146     tcg_gen_shr_tl(t2, s2, shift);
3147     tcg_gen_shri_tl(t2, t2, 1);
3148 
3149     tcg_gen_or_tl(dst, t1, t2);
3150 
3151     tcg_temp_free(t1);
3152     tcg_temp_free(t2);
3153     tcg_temp_free(shift);
3154 }
3155 #endif
3156 
/* Feature gates for the decoder.  These expand to a goto targeting the
   illegal_insn / nfpu_insn labels defined inside disas_sparc_insn, so
   they are only usable from within that function's switch body.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3163 
3164 /* before an instruction, dc->pc must be static */
3165 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3166 {
3167     unsigned int opc, rs1, rs2, rd;
3168     TCGv cpu_src1, cpu_src2;
3169     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3170     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3171     target_long simm;
3172 
3173     opc = GET_FIELD(insn, 0, 1);
3174     rd = GET_FIELD(insn, 2, 6);
3175 
3176     switch (opc) {
3177     case 0:                     /* branches/sethi */
3178         {
3179             unsigned int xop = GET_FIELD(insn, 7, 9);
3180             int32_t target;
3181             switch (xop) {
3182 #ifdef TARGET_SPARC64
3183             case 0x1:           /* V9 BPcc */
3184                 {
3185                     int cc;
3186 
3187                     target = GET_FIELD_SP(insn, 0, 18);
3188                     target = sign_extend(target, 19);
3189                     target <<= 2;
3190                     cc = GET_FIELD_SP(insn, 20, 21);
3191                     if (cc == 0)
3192                         do_branch(dc, target, insn, 0);
3193                     else if (cc == 2)
3194                         do_branch(dc, target, insn, 1);
3195                     else
3196                         goto illegal_insn;
3197                     goto jmp_insn;
3198                 }
3199             case 0x3:           /* V9 BPr */
3200                 {
3201                     target = GET_FIELD_SP(insn, 0, 13) |
3202                         (GET_FIELD_SP(insn, 20, 21) << 14);
3203                     target = sign_extend(target, 16);
3204                     target <<= 2;
3205                     cpu_src1 = get_src1(dc, insn);
3206                     do_branch_reg(dc, target, insn, cpu_src1);
3207                     goto jmp_insn;
3208                 }
3209             case 0x5:           /* V9 FBPcc */
3210                 {
3211                     int cc = GET_FIELD_SP(insn, 20, 21);
3212                     if (gen_trap_ifnofpu(dc)) {
3213                         goto jmp_insn;
3214                     }
3215                     target = GET_FIELD_SP(insn, 0, 18);
3216                     target = sign_extend(target, 19);
3217                     target <<= 2;
3218                     do_fbranch(dc, target, insn, cc);
3219                     goto jmp_insn;
3220                 }
3221 #else
3222             case 0x7:           /* CBN+x */
3223                 {
3224                     goto ncp_insn;
3225                 }
3226 #endif
3227             case 0x2:           /* BN+x */
3228                 {
3229                     target = GET_FIELD(insn, 10, 31);
3230                     target = sign_extend(target, 22);
3231                     target <<= 2;
3232                     do_branch(dc, target, insn, 0);
3233                     goto jmp_insn;
3234                 }
3235             case 0x6:           /* FBN+x */
3236                 {
3237                     if (gen_trap_ifnofpu(dc)) {
3238                         goto jmp_insn;
3239                     }
3240                     target = GET_FIELD(insn, 10, 31);
3241                     target = sign_extend(target, 22);
3242                     target <<= 2;
3243                     do_fbranch(dc, target, insn, 0);
3244                     goto jmp_insn;
3245                 }
3246             case 0x4:           /* SETHI */
3247                 /* Special-case %g0 because that's the canonical nop.  */
3248                 if (rd) {
3249                     uint32_t value = GET_FIELD(insn, 10, 31);
3250                     TCGv t = gen_dest_gpr(dc, rd);
3251                     tcg_gen_movi_tl(t, value << 10);
3252                     gen_store_gpr(dc, rd, t);
3253                 }
3254                 break;
3255             case 0x0:           /* UNIMPL */
3256             default:
3257                 goto illegal_insn;
3258             }
3259             break;
3260         }
3261         break;
3262     case 1:                     /*CALL*/
3263         {
3264             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3265             TCGv o7 = gen_dest_gpr(dc, 15);
3266 
3267             tcg_gen_movi_tl(o7, dc->pc);
3268             gen_store_gpr(dc, 15, o7);
3269             target += dc->pc;
3270             gen_mov_pc_npc(dc);
3271 #ifdef TARGET_SPARC64
3272             if (unlikely(AM_CHECK(dc))) {
3273                 target &= 0xffffffffULL;
3274             }
3275 #endif
3276             dc->npc = target;
3277         }
3278         goto jmp_insn;
3279     case 2:                     /* FPU & Logical Operations */
3280         {
3281             unsigned int xop = GET_FIELD(insn, 7, 12);
3282             TCGv cpu_dst = get_temp_tl(dc);
3283             TCGv cpu_tmp0;
3284 
3285             if (xop == 0x3a) {  /* generate trap */
3286                 int cond = GET_FIELD(insn, 3, 6);
3287                 TCGv_i32 trap;
3288                 TCGLabel *l1 = NULL;
3289                 int mask;
3290 
3291                 if (cond == 0) {
3292                     /* Trap never.  */
3293                     break;
3294                 }
3295 
3296                 save_state(dc);
3297 
3298                 if (cond != 8) {
3299                     /* Conditional trap.  */
3300                     DisasCompare cmp;
3301 #ifdef TARGET_SPARC64
3302                     /* V9 icc/xcc */
3303                     int cc = GET_FIELD_SP(insn, 11, 12);
3304                     if (cc == 0) {
3305                         gen_compare(&cmp, 0, cond, dc);
3306                     } else if (cc == 2) {
3307                         gen_compare(&cmp, 1, cond, dc);
3308                     } else {
3309                         goto illegal_insn;
3310                     }
3311 #else
3312                     gen_compare(&cmp, 0, cond, dc);
3313 #endif
3314                     l1 = gen_new_label();
3315                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3316                                       cmp.c1, cmp.c2, l1);
3317                     free_compare(&cmp);
3318                 }
3319 
3320                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3321                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3322 
3323                 /* Don't use the normal temporaries, as they may well have
3324                    gone out of scope with the branch above.  While we're
3325                    doing that we might as well pre-truncate to 32-bit.  */
3326                 trap = tcg_temp_new_i32();
3327 
3328                 rs1 = GET_FIELD_SP(insn, 14, 18);
3329                 if (IS_IMM) {
3330                     rs2 = GET_FIELD_SP(insn, 0, 7);
3331                     if (rs1 == 0) {
3332                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3333                         /* Signal that the trap value is fully constant.  */
3334                         mask = 0;
3335                     } else {
3336                         TCGv t1 = gen_load_gpr(dc, rs1);
3337                         tcg_gen_trunc_tl_i32(trap, t1);
3338                         tcg_gen_addi_i32(trap, trap, rs2);
3339                     }
3340                 } else {
3341                     TCGv t1, t2;
3342                     rs2 = GET_FIELD_SP(insn, 0, 4);
3343                     t1 = gen_load_gpr(dc, rs1);
3344                     t2 = gen_load_gpr(dc, rs2);
3345                     tcg_gen_add_tl(t1, t1, t2);
3346                     tcg_gen_trunc_tl_i32(trap, t1);
3347                 }
3348                 if (mask != 0) {
3349                     tcg_gen_andi_i32(trap, trap, mask);
3350                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3351                 }
3352 
3353                 gen_helper_raise_exception(cpu_env, trap);
3354                 tcg_temp_free_i32(trap);
3355 
3356                 if (cond == 8) {
3357                     /* An unconditional trap ends the TB.  */
3358                     dc->is_br = 1;
3359                     goto jmp_insn;
3360                 } else {
3361                     /* A conditional trap falls through to the next insn.  */
3362                     gen_set_label(l1);
3363                     break;
3364                 }
3365             } else if (xop == 0x28) {
3366                 rs1 = GET_FIELD(insn, 13, 17);
3367                 switch(rs1) {
3368                 case 0: /* rdy */
3369 #ifndef TARGET_SPARC64
3370                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3371                                        manual, rdy on the microSPARC
3372                                        II */
3373                 case 0x0f:          /* stbar in the SPARCv8 manual,
3374                                        rdy on the microSPARC II */
3375                 case 0x10 ... 0x1f: /* implementation-dependent in the
3376                                        SPARCv8 manual, rdy on the
3377                                        microSPARC II */
3378                     /* Read Asr17 */
3379                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3380                         TCGv t = gen_dest_gpr(dc, rd);
3381                         /* Read Asr17 for a Leon3 monoprocessor */
3382                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3383                         gen_store_gpr(dc, rd, t);
3384                         break;
3385                     }
3386 #endif
3387                     gen_store_gpr(dc, rd, cpu_y);
3388                     break;
3389 #ifdef TARGET_SPARC64
3390                 case 0x2: /* V9 rdccr */
3391                     update_psr(dc);
3392                     gen_helper_rdccr(cpu_dst, cpu_env);
3393                     gen_store_gpr(dc, rd, cpu_dst);
3394                     break;
3395                 case 0x3: /* V9 rdasi */
3396                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3397                     gen_store_gpr(dc, rd, cpu_dst);
3398                     break;
3399                 case 0x4: /* V9 rdtick */
3400                     {
3401                         TCGv_ptr r_tickptr;
3402                         TCGv_i32 r_const;
3403 
3404                         r_tickptr = tcg_temp_new_ptr();
3405                         r_const = tcg_const_i32(dc->mem_idx);
3406                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3407                                        offsetof(CPUSPARCState, tick));
3408                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3409                                                   r_const);
3410                         tcg_temp_free_ptr(r_tickptr);
3411                         tcg_temp_free_i32(r_const);
3412                         gen_store_gpr(dc, rd, cpu_dst);
3413                     }
3414                     break;
3415                 case 0x5: /* V9 rdpc */
3416                     {
3417                         TCGv t = gen_dest_gpr(dc, rd);
3418                         if (unlikely(AM_CHECK(dc))) {
3419                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3420                         } else {
3421                             tcg_gen_movi_tl(t, dc->pc);
3422                         }
3423                         gen_store_gpr(dc, rd, t);
3424                     }
3425                     break;
3426                 case 0x6: /* V9 rdfprs */
3427                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3428                     gen_store_gpr(dc, rd, cpu_dst);
3429                     break;
3430                 case 0xf: /* V9 membar */
3431                     break; /* no effect */
3432                 case 0x13: /* Graphics Status */
3433                     if (gen_trap_ifnofpu(dc)) {
3434                         goto jmp_insn;
3435                     }
3436                     gen_store_gpr(dc, rd, cpu_gsr);
3437                     break;
3438                 case 0x16: /* Softint */
3439                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3440                                      offsetof(CPUSPARCState, softint));
3441                     gen_store_gpr(dc, rd, cpu_dst);
3442                     break;
3443                 case 0x17: /* Tick compare */
3444                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3445                     break;
3446                 case 0x18: /* System tick */
3447                     {
3448                         TCGv_ptr r_tickptr;
3449                         TCGv_i32 r_const;
3450 
3451                         r_tickptr = tcg_temp_new_ptr();
3452                         r_const = tcg_const_i32(dc->mem_idx);
3453                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3454                                        offsetof(CPUSPARCState, stick));
3455                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3456                                                   r_const);
3457                         tcg_temp_free_ptr(r_tickptr);
3458                         tcg_temp_free_i32(r_const);
3459                         gen_store_gpr(dc, rd, cpu_dst);
3460                     }
3461                     break;
3462                 case 0x19: /* System tick compare */
3463                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3464                     break;
3465                 case 0x1a: /* UltraSPARC-T1 Strand status */
3466                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3467                      * this ASR as impl. dep
3468                      */
3469                     CHECK_IU_FEATURE(dc, HYPV);
3470                     {
3471                         TCGv t = gen_dest_gpr(dc, rd);
3472                         tcg_gen_movi_tl(t, 1UL);
3473                         gen_store_gpr(dc, rd, t);
3474                     }
3475                     break;
3476                 case 0x10: /* Performance Control */
3477                 case 0x11: /* Performance Instrumentation Counter */
3478                 case 0x12: /* Dispatch Control */
3479                 case 0x14: /* Softint set, WO */
3480                 case 0x15: /* Softint clear, WO */
3481 #endif
3482                 default:
3483                     goto illegal_insn;
3484                 }
3485 #if !defined(CONFIG_USER_ONLY)
3486             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3487 #ifndef TARGET_SPARC64
3488                 if (!supervisor(dc)) {
3489                     goto priv_insn;
3490                 }
3491                 update_psr(dc);
3492                 gen_helper_rdpsr(cpu_dst, cpu_env);
3493 #else
3494                 CHECK_IU_FEATURE(dc, HYPV);
3495                 if (!hypervisor(dc))
3496                     goto priv_insn;
3497                 rs1 = GET_FIELD(insn, 13, 17);
3498                 switch (rs1) {
3499                 case 0: // hpstate
3500                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3501                                    offsetof(CPUSPARCState, hpstate));
3502                     break;
3503                 case 1: // htstate
3504                     // gen_op_rdhtstate();
3505                     break;
3506                 case 3: // hintp
3507                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3508                     break;
3509                 case 5: // htba
3510                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3511                     break;
3512                 case 6: // hver
3513                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3514                     break;
3515                 case 31: // hstick_cmpr
3516                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3517                     break;
3518                 default:
3519                     goto illegal_insn;
3520                 }
3521 #endif
3522                 gen_store_gpr(dc, rd, cpu_dst);
3523                 break;
3524             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3525                 if (!supervisor(dc)) {
3526                     goto priv_insn;
3527                 }
3528                 cpu_tmp0 = get_temp_tl(dc);
3529 #ifdef TARGET_SPARC64
3530                 rs1 = GET_FIELD(insn, 13, 17);
3531                 switch (rs1) {
3532                 case 0: // tpc
3533                     {
3534                         TCGv_ptr r_tsptr;
3535 
3536                         r_tsptr = tcg_temp_new_ptr();
3537                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3538                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3539                                       offsetof(trap_state, tpc));
3540                         tcg_temp_free_ptr(r_tsptr);
3541                     }
3542                     break;
3543                 case 1: // tnpc
3544                     {
3545                         TCGv_ptr r_tsptr;
3546 
3547                         r_tsptr = tcg_temp_new_ptr();
3548                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3549                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3550                                       offsetof(trap_state, tnpc));
3551                         tcg_temp_free_ptr(r_tsptr);
3552                     }
3553                     break;
3554                 case 2: // tstate
3555                     {
3556                         TCGv_ptr r_tsptr;
3557 
3558                         r_tsptr = tcg_temp_new_ptr();
3559                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3560                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3561                                       offsetof(trap_state, tstate));
3562                         tcg_temp_free_ptr(r_tsptr);
3563                     }
3564                     break;
3565                 case 3: // tt
3566                     {
3567                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3568 
3569                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3570                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3571                                          offsetof(trap_state, tt));
3572                         tcg_temp_free_ptr(r_tsptr);
3573                     }
3574                     break;
3575                 case 4: // tick
3576                     {
3577                         TCGv_ptr r_tickptr;
3578                         TCGv_i32 r_const;
3579 
3580                         r_tickptr = tcg_temp_new_ptr();
3581                         r_const = tcg_const_i32(dc->mem_idx);
3582                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3583                                        offsetof(CPUSPARCState, tick));
3584                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3585                                                   r_tickptr, r_const);
3586                         tcg_temp_free_ptr(r_tickptr);
3587                         tcg_temp_free_i32(r_const);
3588                     }
3589                     break;
3590                 case 5: // tba
3591                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3592                     break;
3593                 case 6: // pstate
3594                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3595                                      offsetof(CPUSPARCState, pstate));
3596                     break;
3597                 case 7: // tl
3598                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3599                                      offsetof(CPUSPARCState, tl));
3600                     break;
3601                 case 8: // pil
3602                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3603                                      offsetof(CPUSPARCState, psrpil));
3604                     break;
3605                 case 9: // cwp
3606                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3607                     break;
3608                 case 10: // cansave
3609                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3610                                      offsetof(CPUSPARCState, cansave));
3611                     break;
3612                 case 11: // canrestore
3613                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3614                                      offsetof(CPUSPARCState, canrestore));
3615                     break;
3616                 case 12: // cleanwin
3617                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3618                                      offsetof(CPUSPARCState, cleanwin));
3619                     break;
3620                 case 13: // otherwin
3621                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3622                                      offsetof(CPUSPARCState, otherwin));
3623                     break;
3624                 case 14: // wstate
3625                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3626                                      offsetof(CPUSPARCState, wstate));
3627                     break;
3628                 case 16: // UA2005 gl
3629                     CHECK_IU_FEATURE(dc, GL);
3630                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3631                                      offsetof(CPUSPARCState, gl));
3632                     break;
3633                 case 26: // UA2005 strand status
3634                     CHECK_IU_FEATURE(dc, HYPV);
3635                     if (!hypervisor(dc))
3636                         goto priv_insn;
3637                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3638                     break;
3639                 case 31: // ver
3640                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3641                     break;
3642                 case 15: // fq
3643                 default:
3644                     goto illegal_insn;
3645                 }
3646 #else
3647                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3648 #endif
3649                 gen_store_gpr(dc, rd, cpu_tmp0);
3650                 break;
3651             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3652 #ifdef TARGET_SPARC64
3653                 gen_helper_flushw(cpu_env);
3654 #else
3655                 if (!supervisor(dc))
3656                     goto priv_insn;
3657                 gen_store_gpr(dc, rd, cpu_tbr);
3658 #endif
3659                 break;
3660 #endif
3661             } else if (xop == 0x34) {   /* FPU Operations */
3662                 if (gen_trap_ifnofpu(dc)) {
3663                     goto jmp_insn;
3664                 }
3665                 gen_op_clear_ieee_excp_and_FTT();
3666                 rs1 = GET_FIELD(insn, 13, 17);
3667                 rs2 = GET_FIELD(insn, 27, 31);
3668                 xop = GET_FIELD(insn, 18, 26);
3669 
3670                 switch (xop) {
3671                 case 0x1: /* fmovs */
3672                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3673                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3674                     break;
3675                 case 0x5: /* fnegs */
3676                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3677                     break;
3678                 case 0x9: /* fabss */
3679                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3680                     break;
3681                 case 0x29: /* fsqrts */
3682                     CHECK_FPU_FEATURE(dc, FSQRT);
3683                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3684                     break;
3685                 case 0x2a: /* fsqrtd */
3686                     CHECK_FPU_FEATURE(dc, FSQRT);
3687                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3688                     break;
3689                 case 0x2b: /* fsqrtq */
3690                     CHECK_FPU_FEATURE(dc, FLOAT128);
3691                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3692                     break;
3693                 case 0x41: /* fadds */
3694                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3695                     break;
3696                 case 0x42: /* faddd */
3697                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3698                     break;
3699                 case 0x43: /* faddq */
3700                     CHECK_FPU_FEATURE(dc, FLOAT128);
3701                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3702                     break;
3703                 case 0x45: /* fsubs */
3704                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3705                     break;
3706                 case 0x46: /* fsubd */
3707                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3708                     break;
3709                 case 0x47: /* fsubq */
3710                     CHECK_FPU_FEATURE(dc, FLOAT128);
3711                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3712                     break;
3713                 case 0x49: /* fmuls */
3714                     CHECK_FPU_FEATURE(dc, FMUL);
3715                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3716                     break;
3717                 case 0x4a: /* fmuld */
3718                     CHECK_FPU_FEATURE(dc, FMUL);
3719                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3720                     break;
3721                 case 0x4b: /* fmulq */
3722                     CHECK_FPU_FEATURE(dc, FLOAT128);
3723                     CHECK_FPU_FEATURE(dc, FMUL);
3724                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3725                     break;
3726                 case 0x4d: /* fdivs */
3727                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3728                     break;
3729                 case 0x4e: /* fdivd */
3730                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3731                     break;
3732                 case 0x4f: /* fdivq */
3733                     CHECK_FPU_FEATURE(dc, FLOAT128);
3734                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
                    break;
                case 0x69: /* fsmuld */
                    CHECK_FPU_FEATURE(dc, FSMULD);
                    gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
                    break;
                case 0x6e: /* fdmulq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
                    break;
                /* FP format conversions.  The gen_fop_XY / gen_ne_fop_XY
                   helper names encode operand widths (F = 32-bit, D = 64-bit,
                   Q = 128-bit): the first suffix letter is the destination,
                   the remainder the source(s).  */
                case 0xc4: /* fitos */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
                    break;
                case 0xc6: /* fdtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
                    break;
                case 0xc7: /* fqtos */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
                    break;
                case 0xc8: /* fitod */
                    /* NOTE(review): the gen_ne_* ("no exception") variants
                       skip the post-op FP exception bookkeeping done by the
                       plain gen_fop_* wrappers -- presumably because these
                       operations cannot raise an IEEE exception; confirm
                       against the wrapper/helper definitions.  */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
                    break;
                case 0xc9: /* fstod */
                    gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
                    break;
                case 0xcb: /* fqtod */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
                    break;
                case 0xcc: /* fitoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
                    break;
                case 0xcd: /* fstoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
                    break;
                case 0xce: /* fdtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
                    break;
                case 0xd1: /* fstoi */
                    gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
                    break;
                case 0xd2: /* fdtoi */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
                    break;
                case 0xd3: /* fqtoi */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
                    break;
#ifdef TARGET_SPARC64
                /* SPARC-V9 additions: 64/128-bit register moves,
                   negate/absolute-value, and 64-bit integer <-> FP
                   conversions (fstox/fxtos etc.).  */
                case 0x2: /* V9 fmovd */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x3: /* V9 fmovq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_move_Q(dc, rd, rs2);
                    break;
                case 0x6: /* V9 fnegd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
                    break;
                case 0x7: /* V9 fnegq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
                    break;
                case 0xa: /* V9 fabsd */
                    gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
                    break;
                case 0xb: /* V9 fabsq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
                    break;
                case 0x81: /* V9 fstox */
                    gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
                    break;
                case 0x82: /* V9 fdtox */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
                    break;
                case 0x83: /* V9 fqtox */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
                    break;
                case 0x84: /* V9 fxtos */
                    gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
                    break;
                case 0x88: /* V9 fxtod */
                    gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
                    break;
                case 0x8c: /* V9 fxtoq */
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
                    break;
#endif
                default:
                    goto illegal_insn;
                }
            } else if (xop == 0x35) {   /* FPU Operations */
#ifdef TARGET_SPARC64
                int cond;
#endif
                /* FPop2: conditional FP moves (V9) and FP compares.
                   Trap first if the FPU is disabled, then clear any stale
                   IEEE exception / trap-type state before decoding.  */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }
                gen_op_clear_ieee_excp_and_FTT();
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                xop = GET_FIELD(insn, 18, 26);

#ifdef TARGET_SPARC64
                /* FMOVR: move FP register rs2 to rd when integer register
                   rs1 satisfies the register-condition field in bits
                   10-12 of the instruction.  */
#define FMOVR(sz)                                                  \
                do {                                               \
                    DisasCompare cmp;                              \
                    cond = GET_FIELD_SP(insn, 10, 12);             \
                    cpu_src1 = get_src1(dc, insn);                 \
                    gen_compare_reg(&cmp, cond, cpu_src1);         \
                    gen_fmov##sz(dc, &cmp, rd, rs2);               \
                    free_compare(&cmp);                            \
                } while (0)

                if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
                    FMOVR(s);
                    break;
                } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
                    FMOVR(d);
                    break;
                } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
                    CHECK_FPU_FEATURE(dc, FLOAT128);
                    FMOVR(q);
                    break;
                }
#undef FMOVR
#endif
                switch (xop) {
#ifdef TARGET_SPARC64
                /* FMOVcc conditioned on an FP condition-code field %fccN;
                   the condition code itself is in bits 14-17.  */
#define FMOVCC(fcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_fcompare(&cmp, fcc, cond);                  \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                        free_compare(&cmp);                             \
                    } while (0)

                    case 0x001: /* V9 fmovscc %fcc0 */
                        FMOVCC(0, s);
                        break;
                    case 0x002: /* V9 fmovdcc %fcc0 */
                        FMOVCC(0, d);
                        break;
                    case 0x003: /* V9 fmovqcc %fcc0 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x041: /* V9 fmovscc %fcc1 */
                        FMOVCC(1, s);
                        break;
                    case 0x042: /* V9 fmovdcc %fcc1 */
                        FMOVCC(1, d);
                        break;
                    case 0x043: /* V9 fmovqcc %fcc1 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
                    case 0x081: /* V9 fmovscc %fcc2 */
                        FMOVCC(2, s);
                        break;
                    case 0x082: /* V9 fmovdcc %fcc2 */
                        FMOVCC(2, d);
                        break;
                    case 0x083: /* V9 fmovqcc %fcc2 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(2, q);
                        break;
                    case 0x0c1: /* V9 fmovscc %fcc3 */
                        FMOVCC(3, s);
                        break;
                    case 0x0c2: /* V9 fmovdcc %fcc3 */
                        FMOVCC(3, d);
                        break;
                    case 0x0c3: /* V9 fmovqcc %fcc3 */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(3, q);
                        break;
#undef FMOVCC
                /* Same pattern, but conditioned on the integer condition
                   codes: first macro argument 0 selects %icc, 1 selects
                   %xcc (cf. gen_compare).  */
#define FMOVCC(xcc, sz)                                                 \
                    do {                                                \
                        DisasCompare cmp;                               \
                        cond = GET_FIELD_SP(insn, 14, 17);              \
                        gen_compare(&cmp, xcc, cond, dc);               \
                        gen_fmov##sz(dc, &cmp, rd, rs2);                \
                        free_compare(&cmp);                             \
                    } while (0)

                    case 0x101: /* V9 fmovscc %icc */
                        FMOVCC(0, s);
                        break;
                    case 0x102: /* V9 fmovdcc %icc */
                        FMOVCC(0, d);
                        break;
                    case 0x103: /* V9 fmovqcc %icc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(0, q);
                        break;
                    case 0x181: /* V9 fmovscc %xcc */
                        FMOVCC(1, s);
                        break;
                    case 0x182: /* V9 fmovdcc %xcc */
                        FMOVCC(1, d);
                        break;
                    case 0x183: /* V9 fmovqcc %xcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        FMOVCC(1, q);
                        break;
#undef FMOVCC
#endif
                    /* FP compares.  The result goes to the %fcc field
                       selected by rd & 3 (%fcc0 on pre-V9, which only has
                       one field).  The fcmpe* mnemonics are the signaling
                       ("compare and exception") forms.  */
                    case 0x51: /* fcmps, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x52: /* fcmpd, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x53: /* fcmpq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        /* 128-bit operands travel through the QT0/QT1
                           scratch slots rather than TCG temporaries.  */
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpq(rd & 3);
                        break;
                    case 0x55: /* fcmpes, V9 %fcc */
                        cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                        cpu_src2_32 = gen_load_fpr_F(dc, rs2);
                        gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
                        break;
                    case 0x56: /* fcmped, V9 %fcc */
                        cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                        cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                        gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
                        break;
                    case 0x57: /* fcmpeq, V9 %fcc */
                        CHECK_FPU_FEATURE(dc, FLOAT128);
                        gen_op_load_fpr_QT0(QFPREG(rs1));
                        gen_op_load_fpr_QT1(QFPREG(rs2));
                        gen_op_fcmpeq(rd & 3);
                        break;
                    default:
                        goto illegal_insn;
                }
            } else if (xop == 0x2) {
                /* or: decoded separately from the generic logic ops so the
                   synthetic instructions clr (or with rs1 == %g0 and a zero
                   source) and mov (or with one %g0 operand) translate to a
                   plain move/immediate instead of a real OR.  */
                TCGv dst = gen_dest_gpr(dc, rd);
                rs1 = GET_FIELD(insn, 13, 17);
                if (rs1 == 0) {
                    /* clr/mov shortcut : or %g0, x, y -> mov x, y */
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_movi_tl(dst, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* or %g0, %g0, y -> clr y */
                            tcg_gen_movi_tl(dst, 0);
                            gen_store_gpr(dc, rd, dst);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            gen_store_gpr(dc, rd, cpu_src2);
                        }
                    }
                } else {
                    cpu_src1 = get_src1(dc, insn);
                    if (IS_IMM) {       /* immediate */
                        simm = GET_FIELDs(insn, 19, 31);
                        tcg_gen_ori_tl(dst, cpu_src1, simm);
                        gen_store_gpr(dc, rd, dst);
                    } else {            /* register */
                        rs2 = GET_FIELD(insn, 27, 31);
                        if (rs2 == 0) {
                            /* mov shortcut:  or x, %g0, y -> mov x, y */
                            gen_store_gpr(dc, rd, cpu_src1);
                        } else {
                            cpu_src2 = gen_load_gpr(dc, rs2);
                            tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
                            gen_store_gpr(dc, rd, dst);
                        }
                    }
                }
#ifdef TARGET_SPARC64
            /* V9 shift instructions.  Bit 12 of the instruction (the X bit)
               selects the 64-bit form (sllx/srlx/srax, shift count masked
               to 6 bits); without it the 32-bit form masks the count to
               5 bits and operates on the low 32 bits of the source.  */
            } else if (xop == 0x25) { /* sll, V9 sllx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                    }
                    tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x26) { /* srl, V9 srlx */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit srl: zero-extend the low word first so
                           bits shifted in from above are zero.  */
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
                        tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
            } else if (xop == 0x27) { /* sra, V9 srax */
                cpu_src1 = get_src1(dc, insn);
                if (IS_IMM) {   /* immediate */
                    simm = GET_FIELDs(insn, 20, 31);
                    if (insn & (1 << 12)) {
                        tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
                    } else {
                        /* 32-bit sra: sign-extend the low word before the
                           arithmetic shift.  */
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
                    }
                } else {                /* register */
                    rs2 = GET_FIELD(insn, 27, 31);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    cpu_tmp0 = get_temp_tl(dc);
                    if (insn & (1 << 12)) {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
                        tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
                    } else {
                        tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
                        tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
                        tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
                    }
                }
                gen_store_gpr(dc, rd, cpu_dst);
#endif
            } else if (xop < 0x36) {
                if (xop < 0x20) {
                    /* Arithmetic / logic / multiply / divide group.
                       Bit 4 of xop (0x10) selects the flag-setting (cc)
                       variant of each operation, hence the switch on
                       xop & ~0x10.  For the simple logic ops the flags are
                       derived lazily: cpu_cc_dst gets the result and
                       cc_op is set to CC_OP_LOGIC.  */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop & ~0x10) {
                    case 0x0: /* add */
                        if (xop & 0x10) {
                            gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                            dc->cc_op = CC_OP_ADD;
                        } else {
                            tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x1: /* and */
                        tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x2: /* or */
                        tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x3: /* xor */
                        tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x4: /* sub */
                        if (xop & 0x10) {
                            gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
                            dc->cc_op = CC_OP_SUB;
                        } else {
                            tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
                        }
                        break;
                    case 0x5: /* andn */
                        tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x6: /* orn */
                        tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x7: /* xorn */
                        tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0x8: /* addx, V9 addc */
                        /* Add-with-carry; the helper handles both the
                           plain and cc-setting forms.  */
                        gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0x9: /* V9 mulx */
                        tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xa: /* umul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xb: /* smul */
                        CHECK_IU_FEATURE(dc, MUL);
                        gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
                        if (xop & 0x10) {
                            tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
                            dc->cc_op = CC_OP_LOGIC;
                        }
                        break;
                    case 0xc: /* subx, V9 subc */
                        gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
                                        (xop & 0x10));
                        break;
#ifdef TARGET_SPARC64
                    case 0xd: /* V9 udivx */
                        gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
                        break;
#endif
                    case 0xe: /* udiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            /* NOTE(review): unlike add/sub above, no
                               explicit store to cpu_cc_op is emitted here;
                               presumably gen_helper_udiv_cc updates
                               env->cc_op itself -- confirm against the
                               helper implementation.  */
                            gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    case 0xf: /* sdiv */
                        CHECK_IU_FEATURE(dc, DIV);
                        if (xop & 0x10) {
                            gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
                                               cpu_src2);
                            dc->cc_op = CC_OP_DIV;
                        } else {
                            gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
                                            cpu_src2);
                        }
                        break;
                    default:
                        goto illegal_insn;
                    }
                    gen_store_gpr(dc, rd, cpu_dst);
                } else {
                    /* xop in [0x20, 0x35]: tagged arithmetic, mulscc,
                       pre-V9 shifts, and the wr %y / wrasr family.  */
                    cpu_src1 = get_src1(dc, insn);
                    cpu_src2 = get_src2(dc, insn);
                    switch (xop) {
                    case 0x20: /* taddcc */
                        gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
                        dc->cc_op = CC_OP_TADD;
                        break;
                    case 0x21: /* tsubcc */
                        gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
                        dc->cc_op = CC_OP_TSUB;
                        break;
                    case 0x22: /* taddcctv */
                        /* Trapping variant: the helper raises the tag
                           overflow exception itself when needed.  */
                        gen_helper_taddcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TADDTV;
                        break;
                    case 0x23: /* tsubcctv */
                        gen_helper_tsubcctv(cpu_dst, cpu_env,
                                            cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        dc->cc_op = CC_OP_TSUBTV;
                        break;
                    case 0x24: /* mulscc */
                        /* mulscc reads the current flags, so force the
                           lazily-evaluated PSR up to date first.  */
                        update_psr(dc);
                        gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
                        dc->cc_op = CC_OP_ADD;
                        break;
#ifndef TARGET_SPARC64
                    /* Pre-V9 shifts: 32-bit only, count masked to 5 bits.
                       (The V9 variants are decoded earlier.)  */
                    case 0x25:  /* sll */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x26:  /* srl */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x27:  /* sra */
                        if (IS_IMM) { /* immediate */
                            simm = GET_FIELDs(insn, 20, 31);
                            tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
                        } else { /* register */
                            cpu_tmp0 = get_temp_tl(dc);
                            tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
                            tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
                        }
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
#endif
                    case 0x30:
                        {
                            /* wr %y / wrasr: rd selects the ancillary
                               state register; the value written is
                               src1 XOR src2, per the wr instruction
                               definition.  */
                            cpu_tmp0 = get_temp_tl(dc);
                            switch(rd) {
                            case 0: /* wry */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
                                break;
#ifndef TARGET_SPARC64
                            case 0x01 ... 0x0f: /* undefined in the
                                                   SPARCv8 manual, nop
                                                   on the microSPARC
                                                   II */
                            case 0x10 ... 0x1f: /* implementation-dependent
                                                   in the SPARCv8
                                                   manual, nop on the
                                                   microSPARC II */
                                if ((rd == 0x13) && (dc->def->features &
                                                     CPU_FEATURE_POWERDOWN)) {
                                    /* LEON3 power-down */
                                    save_state(dc);
                                    gen_helper_power_down(cpu_env);
                                }
                                break;
#else
                            case 0x2: /* V9 wrccr */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_wrccr(cpu_env, cpu_tmp0);
                                /* wrccr installs concrete flags, so switch
                                   the lazy cc state to CC_OP_FLAGS.  */
                                tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                                dc->cc_op = CC_OP_FLAGS;
                                break;
                            case 0x3: /* V9 wrasi */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
                                tcg_gen_st32_tl(cpu_tmp0, cpu_env,
                                                offsetof(CPUSPARCState, asi));
                                /* End TB to notice changed ASI.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0x6: /* V9 wrfprs */
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
                                /* As with wrasi, end the TB so the new
                                   FPRS value takes effect for subsequent
                                   translation.  */
                                dc->fprs_dirty = 0;
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(0);
                                dc->is_br = 1;
                                break;
                            case 0xf: /* V9 sir, nop if user */
#if !defined(CONFIG_USER_ONLY)
                                if (supervisor(dc)) {
                                    ; // XXX
                                }
#endif
                                break;
                            case 0x13: /* Graphics Status */
                                if (gen_trap_ifnofpu(dc)) {
                                    goto jmp_insn;
                                }
                                tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
                                break;
                            case 0x14: /* Softint set */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_set_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x15: /* Softint clear */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_clear_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x16: /* Softint write */
                                if (!supervisor(dc))
                                    goto illegal_insn;
                                tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                                gen_helper_write_softint(cpu_env, cpu_tmp0);
                                break;
                            case 0x17: /* Tick compare */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    /* Store the new compare value and
                                       reprogram the tick timer's limit.  */
                                    tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, tick));
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_tick_cmpr);
                                    tcg_temp_free_ptr(r_tickptr);
                                }
                                break;
                            case 0x18: /* System tick */
#if !defined(CONFIG_USER_ONLY)
                                if (!supervisor(dc))
                                    goto illegal_insn;
#endif
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
                                                   cpu_src2);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, cpu_env,
                                                   offsetof(CPUSPARCState, stick));
4411                                     gen_helper_tick_set_count(r_tickptr,
4412                                                               cpu_tmp0);
4413                                     tcg_temp_free_ptr(r_tickptr);
4414                                 }
4415                                 break;
4416                             case 0x19: /* System tick compare */
4417 #if !defined(CONFIG_USER_ONLY)
4418                                 if (!supervisor(dc))
4419                                     goto illegal_insn;
4420 #endif
4421                                 {
4422                                     TCGv_ptr r_tickptr;
4423 
4424                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4425                                                    cpu_src2);
4426                                     r_tickptr = tcg_temp_new_ptr();
4427                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4428                                                    offsetof(CPUSPARCState, stick));
4429                                     gen_helper_tick_set_limit(r_tickptr,
4430                                                               cpu_stick_cmpr);
4431                                     tcg_temp_free_ptr(r_tickptr);
4432                                 }
4433                                 break;
4434 
4435                             case 0x10: /* Performance Control */
4436                             case 0x11: /* Performance Instrumentation
4437                                           Counter */
4438                             case 0x12: /* Dispatch Control */
4439 #endif
4440                             default:
4441                                 goto illegal_insn;
4442                             }
4443                         }
4444                         break;
4445 #if !defined(CONFIG_USER_ONLY)
4446                     case 0x31: /* wrpsr, V9 saved, restored */
4447                         {
4448                             if (!supervisor(dc))
4449                                 goto priv_insn;
4450 #ifdef TARGET_SPARC64
                            /* V9 reuses this opcode: rd selects a register
                               window management op instead of writing PSR.  */
4451                             switch (rd) {
4452                             case 0:
4453                                 gen_helper_saved(cpu_env);
4454                                 break;
4455                             case 1:
4456                                 gen_helper_restored(cpu_env);
4457                                 break;
                            /* UA2005 window ops not implemented; they fall
                               through to illegal_insn.  */
4458                             case 2: /* UA2005 allclean */
4459                             case 3: /* UA2005 otherw */
4460                             case 4: /* UA2005 normalw */
4461                             case 5: /* UA2005 invalw */
4462                                 // XXX
4463                             default:
4464                                 goto illegal_insn;
4465                             }
4466 #else
4467                             cpu_tmp0 = get_temp_tl(dc);
4468                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4469                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
                            /* wrpsr replaces the condition codes wholesale:
                               reset the lazy-cc state to CC_OP_FLAGS (both
                               the TCG-visible copy and the translator's
                               cached one) and end the TB.  */
4470                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4471                             dc->cc_op = CC_OP_FLAGS;
4472                             save_state(dc);
4473                             gen_op_next_insn();
4474                             tcg_gen_exit_tb(0);
4475                             dc->is_br = 1;
4476 #endif
4477                         }
4478                         break;
4479                     case 0x32: /* wrwim, V9 wrpr */
4480                         {
4481                             if (!supervisor(dc))
4482                                 goto priv_insn;
4483                             cpu_tmp0 = get_temp_tl(dc);
                            /* Value written is rs1 ^ reg_or_imm (SPARC wr
                               semantics).  */
4484                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4485 #ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register.
                               The t* registers live in the per-trap-level
                               trap_state, located via
                               gen_load_trap_state_at_tl.  */
4486                             switch (rd) {
4487                             case 0: // tpc
4488                                 {
4489                                     TCGv_ptr r_tsptr;
4490 
4491                                     r_tsptr = tcg_temp_new_ptr();
4492                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4493                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4494                                                   offsetof(trap_state, tpc));
4495                                     tcg_temp_free_ptr(r_tsptr);
4496                                 }
4497                                 break;
4498                             case 1: // tnpc
4499                                 {
4500                                     TCGv_ptr r_tsptr;
4501 
4502                                     r_tsptr = tcg_temp_new_ptr();
4503                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4504                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4505                                                   offsetof(trap_state, tnpc));
4506                                     tcg_temp_free_ptr(r_tsptr);
4507                                 }
4508                                 break;
4509                             case 2: // tstate
4510                                 {
4511                                     TCGv_ptr r_tsptr;
4512 
4513                                     r_tsptr = tcg_temp_new_ptr();
4514                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4515                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4516                                                   offsetof(trap_state,
4517                                                            tstate));
4518                                     tcg_temp_free_ptr(r_tsptr);
4519                                 }
4520                                 break;
4521                             case 3: // tt
4522                                 {
4523                                     TCGv_ptr r_tsptr;
4524 
4525                                     r_tsptr = tcg_temp_new_ptr();
4526                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4527                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4528                                                     offsetof(trap_state, tt));
4529                                     tcg_temp_free_ptr(r_tsptr);
4530                                 }
4531                                 break;
4532                             case 4: // tick
4533                                 {
4534                                     TCGv_ptr r_tickptr;
4535 
4536                                     r_tickptr = tcg_temp_new_ptr();
4537                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4538                                                    offsetof(CPUSPARCState, tick));
4539                                     gen_helper_tick_set_count(r_tickptr,
4540                                                               cpu_tmp0);
4541                                     tcg_temp_free_ptr(r_tickptr);
4542                                 }
4543                                 break;
4544                             case 5: // tba
4545                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4546                                 break;
4547                             case 6: // pstate
                                /* PSTATE (and TL below) can change state the
                                   translator depends on, so flush state and
                                   continue at a dynamic npc.  */
4548                                 save_state(dc);
4549                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4550                                 dc->npc = DYNAMIC_PC;
4551                                 break;
4552                             case 7: // tl
4553                                 save_state(dc);
4554                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4555                                                offsetof(CPUSPARCState, tl));
4556                                 dc->npc = DYNAMIC_PC;
4557                                 break;
4558                             case 8: // pil
4559                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4560                                 break;
4561                             case 9: // cwp
4562                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4563                                 break;
4564                             case 10: // cansave
4565                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4566                                                 offsetof(CPUSPARCState,
4567                                                          cansave));
4568                                 break;
4569                             case 11: // canrestore
4570                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4571                                                 offsetof(CPUSPARCState,
4572                                                          canrestore));
4573                                 break;
4574                             case 12: // cleanwin
4575                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4576                                                 offsetof(CPUSPARCState,
4577                                                          cleanwin));
4578                                 break;
4579                             case 13: // otherwin
4580                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4581                                                 offsetof(CPUSPARCState,
4582                                                          otherwin));
4583                                 break;
4584                             case 14: // wstate
4585                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4586                                                 offsetof(CPUSPARCState,
4587                                                          wstate));
4588                                 break;
4589                             case 16: // UA2005 gl
4590                                 CHECK_IU_FEATURE(dc, GL);
4591                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4592                                 break;
4593                             case 26: // UA2005 strand status
4594                                 CHECK_IU_FEATURE(dc, HYPV);
4595                                 if (!hypervisor(dc))
4596                                     goto priv_insn;
4597                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4598                                 break;
4599                             default:
4600                                 goto illegal_insn;
4601                             }
4602 #else
                            /* Pre-V9 wrwim: mask the value to the number of
                               implemented register windows.  */
4603                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4604                             if (dc->def->nwindows != 32) {
4605                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4606                                                 (1 << dc->def->nwindows) - 1);
4607                             }
4608 #endif
4609                         }
4610                         break;
4611                     case 0x33: /* wrtbr, UA2005 wrhpr */
4612                         {
4613 #ifndef TARGET_SPARC64
                            /* Pre-V9: supervisor-only write of the trap base
                               register, value = rs1 ^ reg_or_imm.  */
4614                             if (!supervisor(dc))
4615                                 goto priv_insn;
4616                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4617 #else
                            /* UA2005 wrhpr: hyperprivileged; rd selects the
                               hyperprivileged register.  */
4618                             CHECK_IU_FEATURE(dc, HYPV);
4619                             if (!hypervisor(dc))
4620                                 goto priv_insn;
4621                             cpu_tmp0 = get_temp_tl(dc);
4622                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4623                             switch (rd) {
4624                             case 0: // hpstate
4625                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4626                                                offsetof(CPUSPARCState,
4627                                                         hpstate));
                                /* HPSTATE affects execution mode; end the TB
                                   so the change takes effect immediately.  */
4628                                 save_state(dc);
4629                                 gen_op_next_insn();
4630                                 tcg_gen_exit_tb(0);
4631                                 dc->is_br = 1;
4632                                 break;
4633                             case 1: // htstate
                                /* Not implemented; treated as a nop.  */
4634                                 // XXX gen_op_wrhtstate();
4635                                 break;
4636                             case 3: // hintp
4637                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4638                                 break;
4639                             case 5: // htba
4640                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4641                                 break;
4642                             case 31: // hstick_cmpr
4643                                 {
4644                                     TCGv_ptr r_tickptr;
4645 
4646                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4647                                     r_tickptr = tcg_temp_new_ptr();
4648                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4649                                                    offsetof(CPUSPARCState, hstick));
4650                                     gen_helper_tick_set_limit(r_tickptr,
4651                                                               cpu_hstick_cmpr);
4652                                     tcg_temp_free_ptr(r_tickptr);
4653                                 }
4654                                 break;
                            /* hver is read-only: writing it (or anything
                               else) is an illegal instruction.  */
4655                             case 6: // hver readonly
4656                             default:
4657                                 goto illegal_insn;
4658                             }
4659 #endif
4660                         }
4661                         break;
4662 #endif
4663 #ifdef TARGET_SPARC64
4664                     case 0x2c: /* V9 movcc */
4665                         {
4666                             int cc = GET_FIELD_SP(insn, 11, 12);
4667                             int cond = GET_FIELD_SP(insn, 14, 17);
4668                             DisasCompare cmp;
4669                             TCGv dst;
4670 
                            /* Bit 18 selects integer vs floating-point
                               condition codes.  */
4671                             if (insn & (1 << 18)) {
                                /* Integer ccs: cc==0 is icc, cc==2 is xcc;
                                   1 and 3 are reserved encodings.  */
4672                                 if (cc == 0) {
4673                                     gen_compare(&cmp, 0, cond, dc);
4674                                 } else if (cc == 2) {
4675                                     gen_compare(&cmp, 1, cond, dc);
4676                                 } else {
4677                                     goto illegal_insn;
4678                                 }
4679                             } else {
4680                                 gen_fcompare(&cmp, cc, cond);
4681                             }
4682 
4683                             /* The get_src2 above loaded the normal 13-bit
4684                                immediate field, not the 11-bit field we have
4685                                in movcc.  But it did handle the reg case.  */
4686                             if (IS_IMM) {
4687                                 simm = GET_FIELD_SPs(insn, 0, 10);
4688                                 tcg_gen_movi_tl(cpu_src2, simm);
4689                             }
4690 
                            /* Passing dst as the "false" operand keeps the
                               old register value when the condition fails. */
4691                             dst = gen_load_gpr(dc, rd);
4692                             tcg_gen_movcond_tl(cmp.cond, dst,
4693                                                cmp.c1, cmp.c2,
4694                                                cpu_src2, dst);
4695                             free_compare(&cmp);
4696                             gen_store_gpr(dc, rd, dst);
4697                             break;
4698                         }
4699                     case 0x2d: /* V9 sdivx */
4700                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4701                         gen_store_gpr(dc, rd, cpu_dst);
4702                         break;
4703                     case 0x2e: /* V9 popc */
                        /* Population count of rs2 only (rs1 is unused by
                           this encoding).  */
4704                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4705                         gen_store_gpr(dc, rd, cpu_dst);
4706                         break;
4707                     case 0x2f: /* V9 movr */
4708                         {
4709                             int cond = GET_FIELD_SP(insn, 10, 12);
4710                             DisasCompare cmp;
4711                             TCGv dst;
4712 
                            /* movr conditions test rs1 against zero rather
                               than the condition codes.  */
4713                             gen_compare_reg(&cmp, cond, cpu_src1);
4714 
4715                             /* The get_src2 above loaded the normal 13-bit
4716                                immediate field, not the 10-bit field we have
4717                                in movr.  But it did handle the reg case.  */
4718                             if (IS_IMM) {
4719                                 simm = GET_FIELD_SPs(insn, 0, 9);
4720                                 tcg_gen_movi_tl(cpu_src2, simm);
4721                             }
4722 
4723                             dst = gen_load_gpr(dc, rd);
4724                             tcg_gen_movcond_tl(cmp.cond, dst,
4725                                                cmp.c1, cmp.c2,
4726                                                cpu_src2, dst);
4727                             free_compare(&cmp);
4728                             gen_store_gpr(dc, rd, dst);
4729                             break;
4730                         }
4731 #endif
4732                     default:
4733                         goto illegal_insn;
4734                     }
4735                 }
4736             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4737 #ifdef TARGET_SPARC64
                /* VIS instructions: dispatch on the opf field.  Each case
                   first checks the FPU is enabled and that the CPU
                   implements the required VIS level.  */
4738                 int opf = GET_FIELD_SP(insn, 5, 13);
4739                 rs1 = GET_FIELD(insn, 13, 17);
4740                 rs2 = GET_FIELD(insn, 27, 31);
4741                 if (gen_trap_ifnofpu(dc)) {
4742                     goto jmp_insn;
4743                 }
4744 
4745                 switch (opf) {
                /* edge* family: gen_edge's trailing flags select the
                   variant — presumably (element width, set-cc, left/little
                   form), judging from the cc/n/lcc/ln case naming; confirm
                   against gen_edge's definition.  */
4746                 case 0x000: /* VIS I edge8cc */
4747                     CHECK_FPU_FEATURE(dc, VIS1);
4748                     cpu_src1 = gen_load_gpr(dc, rs1);
4749                     cpu_src2 = gen_load_gpr(dc, rs2);
4750                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4751                     gen_store_gpr(dc, rd, cpu_dst);
4752                     break;
4753                 case 0x001: /* VIS II edge8n */
4754                     CHECK_FPU_FEATURE(dc, VIS2);
4755                     cpu_src1 = gen_load_gpr(dc, rs1);
4756                     cpu_src2 = gen_load_gpr(dc, rs2);
4757                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4758                     gen_store_gpr(dc, rd, cpu_dst);
4759                     break;
4760                 case 0x002: /* VIS I edge8lcc */
4761                     CHECK_FPU_FEATURE(dc, VIS1);
4762                     cpu_src1 = gen_load_gpr(dc, rs1);
4763                     cpu_src2 = gen_load_gpr(dc, rs2);
4764                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4765                     gen_store_gpr(dc, rd, cpu_dst);
4766                     break;
4767                 case 0x003: /* VIS II edge8ln */
4768                     CHECK_FPU_FEATURE(dc, VIS2);
4769                     cpu_src1 = gen_load_gpr(dc, rs1);
4770                     cpu_src2 = gen_load_gpr(dc, rs2);
4771                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4772                     gen_store_gpr(dc, rd, cpu_dst);
4773                     break;
4774                 case 0x004: /* VIS I edge16cc */
4775                     CHECK_FPU_FEATURE(dc, VIS1);
4776                     cpu_src1 = gen_load_gpr(dc, rs1);
4777                     cpu_src2 = gen_load_gpr(dc, rs2);
4778                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4779                     gen_store_gpr(dc, rd, cpu_dst);
4780                     break;
4781                 case 0x005: /* VIS II edge16n */
4782                     CHECK_FPU_FEATURE(dc, VIS2);
4783                     cpu_src1 = gen_load_gpr(dc, rs1);
4784                     cpu_src2 = gen_load_gpr(dc, rs2);
4785                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4786                     gen_store_gpr(dc, rd, cpu_dst);
4787                     break;
4788                 case 0x006: /* VIS I edge16lcc */
4789                     CHECK_FPU_FEATURE(dc, VIS1);
4790                     cpu_src1 = gen_load_gpr(dc, rs1);
4791                     cpu_src2 = gen_load_gpr(dc, rs2);
4792                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4793                     gen_store_gpr(dc, rd, cpu_dst);
4794                     break;
4795                 case 0x007: /* VIS II edge16ln */
4796                     CHECK_FPU_FEATURE(dc, VIS2);
4797                     cpu_src1 = gen_load_gpr(dc, rs1);
4798                     cpu_src2 = gen_load_gpr(dc, rs2);
4799                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4800                     gen_store_gpr(dc, rd, cpu_dst);
4801                     break;
4802                 case 0x008: /* VIS I edge32cc */
4803                     CHECK_FPU_FEATURE(dc, VIS1);
4804                     cpu_src1 = gen_load_gpr(dc, rs1);
4805                     cpu_src2 = gen_load_gpr(dc, rs2);
4806                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4807                     gen_store_gpr(dc, rd, cpu_dst);
4808                     break;
4809                 case 0x009: /* VIS II edge32n */
4810                     CHECK_FPU_FEATURE(dc, VIS2);
4811                     cpu_src1 = gen_load_gpr(dc, rs1);
4812                     cpu_src2 = gen_load_gpr(dc, rs2);
4813                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4814                     gen_store_gpr(dc, rd, cpu_dst);
4815                     break;
4816                 case 0x00a: /* VIS I edge32lcc */
4817                     CHECK_FPU_FEATURE(dc, VIS1);
4818                     cpu_src1 = gen_load_gpr(dc, rs1);
4819                     cpu_src2 = gen_load_gpr(dc, rs2);
4820                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4821                     gen_store_gpr(dc, rd, cpu_dst);
4822                     break;
4823                 case 0x00b: /* VIS II edge32ln */
4824                     CHECK_FPU_FEATURE(dc, VIS2);
4825                     cpu_src1 = gen_load_gpr(dc, rs1);
4826                     cpu_src2 = gen_load_gpr(dc, rs2);
4827                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4828                     gen_store_gpr(dc, rd, cpu_dst);
4829                     break;
4830                 case 0x010: /* VIS I array8 */
4831                     CHECK_FPU_FEATURE(dc, VIS1);
4832                     cpu_src1 = gen_load_gpr(dc, rs1);
4833                     cpu_src2 = gen_load_gpr(dc, rs2);
4834                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4835                     gen_store_gpr(dc, rd, cpu_dst);
4836                     break;
                /* array16/array32 reuse the array8 helper and scale the
                   resulting address by the element size (<<1, <<2).  */
4837                 case 0x012: /* VIS I array16 */
4838                     CHECK_FPU_FEATURE(dc, VIS1);
4839                     cpu_src1 = gen_load_gpr(dc, rs1);
4840                     cpu_src2 = gen_load_gpr(dc, rs2);
4841                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4842                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4843                     gen_store_gpr(dc, rd, cpu_dst);
4844                     break;
4845                 case 0x014: /* VIS I array32 */
4846                     CHECK_FPU_FEATURE(dc, VIS1);
4847                     cpu_src1 = gen_load_gpr(dc, rs1);
4848                     cpu_src2 = gen_load_gpr(dc, rs2);
4849                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4850                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4851                     gen_store_gpr(dc, rd, cpu_dst);
4852                     break;
4853                 case 0x018: /* VIS I alignaddr */
4854                     CHECK_FPU_FEATURE(dc, VIS1);
4855                     cpu_src1 = gen_load_gpr(dc, rs1);
4856                     cpu_src2 = gen_load_gpr(dc, rs2);
4857                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4858                     gen_store_gpr(dc, rd, cpu_dst);
4859                     break;
4860                 case 0x01a: /* VIS I alignaddrl */
4861                     CHECK_FPU_FEATURE(dc, VIS1);
4862                     cpu_src1 = gen_load_gpr(dc, rs1);
4863                     cpu_src2 = gen_load_gpr(dc, rs2);
4864                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4865                     gen_store_gpr(dc, rd, cpu_dst);
4866                     break;
4867                 case 0x019: /* VIS II bmask */
4868                     CHECK_FPU_FEATURE(dc, VIS2);
4869                     cpu_src1 = gen_load_gpr(dc, rs1);
4870                     cpu_src2 = gen_load_gpr(dc, rs2);
                    /* bmask: sum goes to rd and is also deposited into
                       GSR[63:32] for use by a later bshuffle.  */
4871                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4872                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4873                     gen_store_gpr(dc, rd, cpu_dst);
4874                     break;
                /* fcmp* family: compares double FP registers; the helper's
                   result is delivered in an integer register (rd).  */
4875                 case 0x020: /* VIS I fcmple16 */
4876                     CHECK_FPU_FEATURE(dc, VIS1);
4877                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4878                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4879                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4880                     gen_store_gpr(dc, rd, cpu_dst);
4881                     break;
4882                 case 0x022: /* VIS I fcmpne16 */
4883                     CHECK_FPU_FEATURE(dc, VIS1);
4884                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4885                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4886                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4887                     gen_store_gpr(dc, rd, cpu_dst);
4888                     break;
4889                 case 0x024: /* VIS I fcmple32 */
4890                     CHECK_FPU_FEATURE(dc, VIS1);
4891                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4892                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4893                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4894                     gen_store_gpr(dc, rd, cpu_dst);
4895                     break;
4896                 case 0x026: /* VIS I fcmpne32 */
4897                     CHECK_FPU_FEATURE(dc, VIS1);
4898                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4899                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4900                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4901                     gen_store_gpr(dc, rd, cpu_dst);
4902                     break;
4903                 case 0x028: /* VIS I fcmpgt16 */
4904                     CHECK_FPU_FEATURE(dc, VIS1);
4905                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4906                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4907                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4908                     gen_store_gpr(dc, rd, cpu_dst);
4909                     break;
4910                 case 0x02a: /* VIS I fcmpeq16 */
4911                     CHECK_FPU_FEATURE(dc, VIS1);
4912                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4913                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4914                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4915                     gen_store_gpr(dc, rd, cpu_dst);
4916                     break;
4917                 case 0x02c: /* VIS I fcmpgt32 */
4918                     CHECK_FPU_FEATURE(dc, VIS1);
4919                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4920                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4921                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4922                     gen_store_gpr(dc, rd, cpu_dst);
4923                     break;
4924                 case 0x02e: /* VIS I fcmpeq32 */
4925                     CHECK_FPU_FEATURE(dc, VIS1);
4926                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4927                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4928                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4929                     gen_store_gpr(dc, rd, cpu_dst);
4930                     break;
                /* fmul*/partitioned multiplies: double-reg in, double-reg
                   out, generated via the gen_ne_fop_DDD wrapper.  */
4931                 case 0x031: /* VIS I fmul8x16 */
4932                     CHECK_FPU_FEATURE(dc, VIS1);
4933                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4934                     break;
4935                 case 0x033: /* VIS I fmul8x16au */
4936                     CHECK_FPU_FEATURE(dc, VIS1);
4937                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4938                     break;
4939                 case 0x035: /* VIS I fmul8x16al */
4940                     CHECK_FPU_FEATURE(dc, VIS1);
4941                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4942                     break;
4943                 case 0x036: /* VIS I fmul8sux16 */
4944                     CHECK_FPU_FEATURE(dc, VIS1);
4945                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4946                     break;
4947                 case 0x037: /* VIS I fmul8ulx16 */
4948                     CHECK_FPU_FEATURE(dc, VIS1);
4949                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4950                     break;
4951                 case 0x038: /* VIS I fmuld8sux16 */
4952                     CHECK_FPU_FEATURE(dc, VIS1);
4953                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4954                     break;
4955                 case 0x039: /* VIS I fmuld8ulx16 */
4956                     CHECK_FPU_FEATURE(dc, VIS1);
4957                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4958                     break;
                /* fpack/faligndata/bshuffle consume GSR state, hence the
                   gen_gsr_fop_DDD wrapper or explicit cpu_gsr argument.  */
4959                 case 0x03a: /* VIS I fpack32 */
4960                     CHECK_FPU_FEATURE(dc, VIS1);
4961                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4962                     break;
4963                 case 0x03b: /* VIS I fpack16 */
4964                     CHECK_FPU_FEATURE(dc, VIS1);
4965                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4966                     cpu_dst_32 = gen_dest_fpr_F(dc);
4967                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4968                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4969                     break;
4970                 case 0x03d: /* VIS I fpackfix */
4971                     CHECK_FPU_FEATURE(dc, VIS1);
4972                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4973                     cpu_dst_32 = gen_dest_fpr_F(dc);
4974                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4975                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4976                     break;
4977                 case 0x03e: /* VIS I pdist */
4978                     CHECK_FPU_FEATURE(dc, VIS1);
4979                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4980                     break;
4981                 case 0x048: /* VIS I faligndata */
4982                     CHECK_FPU_FEATURE(dc, VIS1);
4983                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4984                     break;
4985                 case 0x04b: /* VIS I fpmerge */
4986                     CHECK_FPU_FEATURE(dc, VIS1);
4987                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4988                     break;
4989                 case 0x04c: /* VIS II bshuffle */
4990                     CHECK_FPU_FEATURE(dc, VIS2);
4991                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4992                     break;
4993                 case 0x04d: /* VIS I fexpand */
4994                     CHECK_FPU_FEATURE(dc, VIS1);
4995                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4996                     break;
4997                 case 0x050: /* VIS I fpadd16 */
4998                     CHECK_FPU_FEATURE(dc, VIS1);
4999                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
5000                     break;
5001                 case 0x051: /* VIS I fpadd16s */
5002                     CHECK_FPU_FEATURE(dc, VIS1);
5003                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
5004                     break;
5005                 case 0x052: /* VIS I fpadd32 */
5006                     CHECK_FPU_FEATURE(dc, VIS1);
5007                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
5008                     break;
5009                 case 0x053: /* VIS I fpadd32s */
5010                     CHECK_FPU_FEATURE(dc, VIS1);
5011                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5012                     break;
5013                 case 0x054: /* VIS I fpsub16 */
5014                     CHECK_FPU_FEATURE(dc, VIS1);
5015                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5016                     break;
5017                 case 0x055: /* VIS I fpsub16s */
5018                     CHECK_FPU_FEATURE(dc, VIS1);
5019                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5020                     break;
5021                 case 0x056: /* VIS I fpsub32 */
5022                     CHECK_FPU_FEATURE(dc, VIS1);
5023                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5024                     break;
5025                 case 0x057: /* VIS I fpsub32s */
5026                     CHECK_FPU_FEATURE(dc, VIS1);
5027                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5028                     break;
5029                 case 0x060: /* VIS I fzero */
5030                     CHECK_FPU_FEATURE(dc, VIS1);
5031                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5032                     tcg_gen_movi_i64(cpu_dst_64, 0);
5033                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5034                     break;
5035                 case 0x061: /* VIS I fzeros */
5036                     CHECK_FPU_FEATURE(dc, VIS1);
5037                     cpu_dst_32 = gen_dest_fpr_F(dc);
5038                     tcg_gen_movi_i32(cpu_dst_32, 0);
5039                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5040                     break;
5041                 case 0x062: /* VIS I fnor */
5042                     CHECK_FPU_FEATURE(dc, VIS1);
5043                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5044                     break;
5045                 case 0x063: /* VIS I fnors */
5046                     CHECK_FPU_FEATURE(dc, VIS1);
5047                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5048                     break;
5049                 case 0x064: /* VIS I fandnot2 */
5050                     CHECK_FPU_FEATURE(dc, VIS1);
5051                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5052                     break;
5053                 case 0x065: /* VIS I fandnot2s */
5054                     CHECK_FPU_FEATURE(dc, VIS1);
5055                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5056                     break;
5057                 case 0x066: /* VIS I fnot2 */
5058                     CHECK_FPU_FEATURE(dc, VIS1);
5059                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5060                     break;
5061                 case 0x067: /* VIS I fnot2s */
5062                     CHECK_FPU_FEATURE(dc, VIS1);
5063                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5064                     break;
5065                 case 0x068: /* VIS I fandnot1 */
5066                     CHECK_FPU_FEATURE(dc, VIS1);
5067                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5068                     break;
5069                 case 0x069: /* VIS I fandnot1s */
5070                     CHECK_FPU_FEATURE(dc, VIS1);
5071                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5072                     break;
5073                 case 0x06a: /* VIS I fnot1 */
5074                     CHECK_FPU_FEATURE(dc, VIS1);
5075                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5076                     break;
5077                 case 0x06b: /* VIS I fnot1s */
5078                     CHECK_FPU_FEATURE(dc, VIS1);
5079                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5080                     break;
5081                 case 0x06c: /* VIS I fxor */
5082                     CHECK_FPU_FEATURE(dc, VIS1);
5083                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5084                     break;
5085                 case 0x06d: /* VIS I fxors */
5086                     CHECK_FPU_FEATURE(dc, VIS1);
5087                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5088                     break;
5089                 case 0x06e: /* VIS I fnand */
5090                     CHECK_FPU_FEATURE(dc, VIS1);
5091                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5092                     break;
5093                 case 0x06f: /* VIS I fnands */
5094                     CHECK_FPU_FEATURE(dc, VIS1);
5095                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5096                     break;
5097                 case 0x070: /* VIS I fand */
5098                     CHECK_FPU_FEATURE(dc, VIS1);
5099                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5100                     break;
5101                 case 0x071: /* VIS I fands */
5102                     CHECK_FPU_FEATURE(dc, VIS1);
5103                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5104                     break;
5105                 case 0x072: /* VIS I fxnor */
5106                     CHECK_FPU_FEATURE(dc, VIS1);
5107                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5108                     break;
5109                 case 0x073: /* VIS I fxnors */
5110                     CHECK_FPU_FEATURE(dc, VIS1);
5111                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5112                     break;
5113                 case 0x074: /* VIS I fsrc1 */
5114                     CHECK_FPU_FEATURE(dc, VIS1);
5115                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5116                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5117                     break;
5118                 case 0x075: /* VIS I fsrc1s */
5119                     CHECK_FPU_FEATURE(dc, VIS1);
5120                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5121                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5122                     break;
5123                 case 0x076: /* VIS I fornot2 */
5124                     CHECK_FPU_FEATURE(dc, VIS1);
5125                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5126                     break;
5127                 case 0x077: /* VIS I fornot2s */
5128                     CHECK_FPU_FEATURE(dc, VIS1);
5129                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5130                     break;
5131                 case 0x078: /* VIS I fsrc2 */
5132                     CHECK_FPU_FEATURE(dc, VIS1);
5133                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5134                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5135                     break;
5136                 case 0x079: /* VIS I fsrc2s */
5137                     CHECK_FPU_FEATURE(dc, VIS1);
5138                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5139                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5140                     break;
5141                 case 0x07a: /* VIS I fornot1 */
5142                     CHECK_FPU_FEATURE(dc, VIS1);
5143                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5144                     break;
5145                 case 0x07b: /* VIS I fornot1s */
5146                     CHECK_FPU_FEATURE(dc, VIS1);
5147                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5148                     break;
5149                 case 0x07c: /* VIS I for */
5150                     CHECK_FPU_FEATURE(dc, VIS1);
5151                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5152                     break;
5153                 case 0x07d: /* VIS I fors */
5154                     CHECK_FPU_FEATURE(dc, VIS1);
5155                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5156                     break;
5157                 case 0x07e: /* VIS I fone */
5158                     CHECK_FPU_FEATURE(dc, VIS1);
5159                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5160                     tcg_gen_movi_i64(cpu_dst_64, -1);
5161                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5162                     break;
5163                 case 0x07f: /* VIS I fones */
5164                     CHECK_FPU_FEATURE(dc, VIS1);
5165                     cpu_dst_32 = gen_dest_fpr_F(dc);
5166                     tcg_gen_movi_i32(cpu_dst_32, -1);
5167                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5168                     break;
5169                 case 0x080: /* VIS I shutdown */
5170                 case 0x081: /* VIS II siam */
5171                     // XXX
5172                     goto illegal_insn;
5173                 default:
5174                     goto illegal_insn;
5175                 }
5176 #else
5177                 goto ncp_insn;
5178 #endif
5179             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5180 #ifdef TARGET_SPARC64
5181                 goto illegal_insn;
5182 #else
5183                 goto ncp_insn;
5184 #endif
5185 #ifdef TARGET_SPARC64
5186             } else if (xop == 0x39) { /* V9 return */
5187                 save_state(dc);
5188                 cpu_src1 = get_src1(dc, insn);
5189                 cpu_tmp0 = get_temp_tl(dc);
5190                 if (IS_IMM) {   /* immediate */
5191                     simm = GET_FIELDs(insn, 19, 31);
5192                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5193                 } else {                /* register */
5194                     rs2 = GET_FIELD(insn, 27, 31);
5195                     if (rs2) {
5196                         cpu_src2 = gen_load_gpr(dc, rs2);
5197                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5198                     } else {
5199                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5200                     }
5201                 }
5202                 gen_helper_restore(cpu_env);
5203                 gen_mov_pc_npc(dc);
5204                 gen_check_align(cpu_tmp0, 3);
5205                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5206                 dc->npc = DYNAMIC_PC;
5207                 goto jmp_insn;
5208 #endif
5209             } else {
5210                 cpu_src1 = get_src1(dc, insn);
5211                 cpu_tmp0 = get_temp_tl(dc);
5212                 if (IS_IMM) {   /* immediate */
5213                     simm = GET_FIELDs(insn, 19, 31);
5214                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5215                 } else {                /* register */
5216                     rs2 = GET_FIELD(insn, 27, 31);
5217                     if (rs2) {
5218                         cpu_src2 = gen_load_gpr(dc, rs2);
5219                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5220                     } else {
5221                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5222                     }
5223                 }
5224                 switch (xop) {
5225                 case 0x38:      /* jmpl */
5226                     {
5227                         TCGv t = gen_dest_gpr(dc, rd);
5228                         tcg_gen_movi_tl(t, dc->pc);
5229                         gen_store_gpr(dc, rd, t);
5230 
5231                         gen_mov_pc_npc(dc);
5232                         gen_check_align(cpu_tmp0, 3);
5233                         gen_address_mask(dc, cpu_tmp0);
5234                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5235                         dc->npc = DYNAMIC_PC;
5236                     }
5237                     goto jmp_insn;
5238 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5239                 case 0x39:      /* rett, V9 return */
5240                     {
5241                         if (!supervisor(dc))
5242                             goto priv_insn;
5243                         gen_mov_pc_npc(dc);
5244                         gen_check_align(cpu_tmp0, 3);
5245                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5246                         dc->npc = DYNAMIC_PC;
5247                         gen_helper_rett(cpu_env);
5248                     }
5249                     goto jmp_insn;
5250 #endif
5251                 case 0x3b: /* flush */
5252                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5253                         goto unimp_flush;
5254                     /* nop */
5255                     break;
5256                 case 0x3c:      /* save */
5257                     gen_helper_save(cpu_env);
5258                     gen_store_gpr(dc, rd, cpu_tmp0);
5259                     break;
5260                 case 0x3d:      /* restore */
5261                     gen_helper_restore(cpu_env);
5262                     gen_store_gpr(dc, rd, cpu_tmp0);
5263                     break;
5264 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5265                 case 0x3e:      /* V9 done/retry */
5266                     {
5267                         switch (rd) {
5268                         case 0:
5269                             if (!supervisor(dc))
5270                                 goto priv_insn;
5271                             dc->npc = DYNAMIC_PC;
5272                             dc->pc = DYNAMIC_PC;
5273                             gen_helper_done(cpu_env);
5274                             goto jmp_insn;
5275                         case 1:
5276                             if (!supervisor(dc))
5277                                 goto priv_insn;
5278                             dc->npc = DYNAMIC_PC;
5279                             dc->pc = DYNAMIC_PC;
5280                             gen_helper_retry(cpu_env);
5281                             goto jmp_insn;
5282                         default:
5283                             goto illegal_insn;
5284                         }
5285                     }
5286                     break;
5287 #endif
5288                 default:
5289                     goto illegal_insn;
5290                 }
5291             }
5292             break;
5293         }
5294         break;
5295     case 3:                     /* load/store instructions */
5296         {
5297             unsigned int xop = GET_FIELD(insn, 7, 12);
5298             /* ??? gen_address_mask prevents us from using a source
5299                register directly.  Always generate a temporary.  */
5300             TCGv cpu_addr = get_temp_tl(dc);
5301 
5302             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5303             if (xop == 0x3c || xop == 0x3e) {
5304                 /* V9 casa/casxa : no offset */
5305             } else if (IS_IMM) {     /* immediate */
5306                 simm = GET_FIELDs(insn, 19, 31);
5307                 if (simm != 0) {
5308                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5309                 }
5310             } else {            /* register */
5311                 rs2 = GET_FIELD(insn, 27, 31);
5312                 if (rs2 != 0) {
5313                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5314                 }
5315             }
5316             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5317                 (xop > 0x17 && xop <= 0x1d ) ||
5318                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5319                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5320 
5321                 switch (xop) {
5322                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5323                     gen_address_mask(dc, cpu_addr);
5324                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5325                     break;
5326                 case 0x1:       /* ldub, load unsigned byte */
5327                     gen_address_mask(dc, cpu_addr);
5328                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5329                     break;
5330                 case 0x2:       /* lduh, load unsigned halfword */
5331                     gen_address_mask(dc, cpu_addr);
5332                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5333                     break;
                case 0x3:       /* ldd, load double word */
                    /* rd must be even: ldd targets an even/odd register
                       pair per SPARC, so an odd rd is illegal.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;

                        gen_address_mask(dc, cpu_addr);
                        /* Load the full 64 bits in one access, then split
                           the halves across the register pair.  */
                        t64 = tcg_temp_new_i64();
                        tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
                        /* Low 32 bits (after zero-extension) go to the odd
                           register rd + 1.  */
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                        gen_store_gpr(dc, rd + 1, cpu_val);
                        /* High 32 bits are left in cpu_val; the common
                           gen_store_gpr(dc, rd, cpu_val) after this switch
                           writes them to the even register rd.  */
                        tcg_gen_shri_i64(t64, t64, 32);
                        tcg_gen_trunc_i64_tl(cpu_val, t64);
                        tcg_temp_free_i64(t64);
                        tcg_gen_ext32u_tl(cpu_val, cpu_val);
                    }
                    break;
5352                 case 0x9:       /* ldsb, load signed byte */
5353                     gen_address_mask(dc, cpu_addr);
5354                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5355                     break;
5356                 case 0xa:       /* ldsh, load signed halfword */
5357                     gen_address_mask(dc, cpu_addr);
5358                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5359                     break;
5360                 case 0xd:       /* ldstub */
5361                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5362                     break;
5363                 case 0x0f:
5364                     /* swap, swap register with memory. Also atomically */
5365                     CHECK_IU_FEATURE(dc, SWAP);
5366                     cpu_src1 = gen_load_gpr(dc, rd);
5367                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5368                              dc->mem_idx, MO_TEUL);
5369                     break;
5370 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5371                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5372                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5373                     break;
5374                 case 0x11:      /* lduba, load unsigned byte alternate */
5375                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5376                     break;
5377                 case 0x12:      /* lduha, load unsigned halfword alternate */
5378                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5379                     break;
5380                 case 0x13:      /* ldda, load double word alternate */
5381                     if (rd & 1) {
5382                         goto illegal_insn;
5383                     }
5384                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5385                     goto skip_move;
5386                 case 0x19:      /* ldsba, load signed byte alternate */
5387                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5388                     break;
5389                 case 0x1a:      /* ldsha, load signed halfword alternate */
5390                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5391                     break;
5392                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5393                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5394                     break;
5395                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5396                                    atomically */
5397                     CHECK_IU_FEATURE(dc, SWAP);
5398                     cpu_src1 = gen_load_gpr(dc, rd);
5399                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5400                     break;
5401 
5402 #ifndef TARGET_SPARC64
5403                 case 0x30: /* ldc */
5404                 case 0x31: /* ldcsr */
5405                 case 0x33: /* lddc */
5406                     goto ncp_insn;
5407 #endif
5408 #endif
5409 #ifdef TARGET_SPARC64
5410                 case 0x08: /* V9 ldsw */
5411                     gen_address_mask(dc, cpu_addr);
5412                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5413                     break;
5414                 case 0x0b: /* V9 ldx */
5415                     gen_address_mask(dc, cpu_addr);
5416                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5417                     break;
5418                 case 0x18: /* V9 ldswa */
5419                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5420                     break;
5421                 case 0x1b: /* V9 ldxa */
5422                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5423                     break;
5424                 case 0x2d: /* V9 prefetch, no effect */
5425                     goto skip_move;
5426                 case 0x30: /* V9 ldfa */
5427                     if (gen_trap_ifnofpu(dc)) {
5428                         goto jmp_insn;
5429                     }
5430                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5431                     gen_update_fprs_dirty(dc, rd);
5432                     goto skip_move;
5433                 case 0x33: /* V9 lddfa */
5434                     if (gen_trap_ifnofpu(dc)) {
5435                         goto jmp_insn;
5436                     }
5437                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5438                     gen_update_fprs_dirty(dc, DFPREG(rd));
5439                     goto skip_move;
5440                 case 0x3d: /* V9 prefetcha, no effect */
5441                     goto skip_move;
5442                 case 0x32: /* V9 ldqfa */
5443                     CHECK_FPU_FEATURE(dc, FLOAT128);
5444                     if (gen_trap_ifnofpu(dc)) {
5445                         goto jmp_insn;
5446                     }
5447                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5448                     gen_update_fprs_dirty(dc, QFPREG(rd));
5449                     goto skip_move;
5450 #endif
5451                 default:
5452                     goto illegal_insn;
5453                 }
5454                 gen_store_gpr(dc, rd, cpu_val);
5455 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5456             skip_move: ;
5457 #endif
5458             } else if (xop >= 0x20 && xop < 0x24) {
5459                 if (gen_trap_ifnofpu(dc)) {
5460                     goto jmp_insn;
5461                 }
5462                 switch (xop) {
5463                 case 0x20:      /* ldf, load fpreg */
5464                     gen_address_mask(dc, cpu_addr);
5465                     cpu_dst_32 = gen_dest_fpr_F(dc);
5466                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5467                                         dc->mem_idx, MO_TEUL);
5468                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5469                     break;
5470                 case 0x21:      /* ldfsr, V9 ldxfsr */
5471 #ifdef TARGET_SPARC64
5472                     gen_address_mask(dc, cpu_addr);
5473                     if (rd == 1) {
5474                         TCGv_i64 t64 = tcg_temp_new_i64();
5475                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5476                                             dc->mem_idx, MO_TEQ);
5477                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5478                         tcg_temp_free_i64(t64);
5479                         break;
5480                     }
5481 #endif
5482                     cpu_dst_32 = get_temp_i32(dc);
5483                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5484                                         dc->mem_idx, MO_TEUL);
5485                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5486                     break;
5487                 case 0x22:      /* ldqf, load quad fpreg */
5488                     CHECK_FPU_FEATURE(dc, FLOAT128);
5489                     gen_address_mask(dc, cpu_addr);
5490                     cpu_src1_64 = tcg_temp_new_i64();
5491                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5492                                         MO_TEQ | MO_ALIGN_4);
5493                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5494                     cpu_src2_64 = tcg_temp_new_i64();
5495                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5496                                         MO_TEQ | MO_ALIGN_4);
5497                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5498                     tcg_temp_free_i64(cpu_src1_64);
5499                     tcg_temp_free_i64(cpu_src2_64);
5500                     break;
5501                 case 0x23:      /* lddf, load double fpreg */
5502                     gen_address_mask(dc, cpu_addr);
5503                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5504                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5505                                         MO_TEQ | MO_ALIGN_4);
5506                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5507                     break;
5508                 default:
5509                     goto illegal_insn;
5510                 }
5511             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5512                        xop == 0xe || xop == 0x1e) {
5513                 TCGv cpu_val = gen_load_gpr(dc, rd);
5514 
5515                 switch (xop) {
5516                 case 0x4: /* st, store word */
5517                     gen_address_mask(dc, cpu_addr);
5518                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5519                     break;
5520                 case 0x5: /* stb, store byte */
5521                     gen_address_mask(dc, cpu_addr);
5522                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5523                     break;
5524                 case 0x6: /* sth, store halfword */
5525                     gen_address_mask(dc, cpu_addr);
5526                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5527                     break;
                case 0x7: /* std, store double word */
                    /* rd must be even: std stores an even/odd register
                       pair, so an odd rd is illegal.  */
                    if (rd & 1)
                        goto illegal_insn;
                    else {
                        TCGv_i64 t64;
                        TCGv lo;

                        gen_address_mask(dc, cpu_addr);
                        /* Build the 64-bit value: rd + 1 supplies the low
                           32 bits, cpu_val (register rd) the high 32 bits,
                           then store it with a single 64-bit access.  */
                        lo = gen_load_gpr(dc, rd + 1);
                        t64 = tcg_temp_new_i64();
                        tcg_gen_concat_tl_i64(t64, lo, cpu_val);
                        tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
                        tcg_temp_free_i64(t64);
                    }
                    break;
5543 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5544                 case 0x14: /* sta, V9 stwa, store word alternate */
5545                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5546                     break;
5547                 case 0x15: /* stba, store byte alternate */
5548                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5549                     break;
5550                 case 0x16: /* stha, store halfword alternate */
5551                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5552                     break;
5553                 case 0x17: /* stda, store double word alternate */
5554                     if (rd & 1) {
5555                         goto illegal_insn;
5556                     }
5557                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5558                     break;
5559 #endif
5560 #ifdef TARGET_SPARC64
5561                 case 0x0e: /* V9 stx */
5562                     gen_address_mask(dc, cpu_addr);
5563                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5564                     break;
5565                 case 0x1e: /* V9 stxa */
5566                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5567                     break;
5568 #endif
5569                 default:
5570                     goto illegal_insn;
5571                 }
5572             } else if (xop > 0x23 && xop < 0x28) {
5573                 if (gen_trap_ifnofpu(dc)) {
5574                     goto jmp_insn;
5575                 }
5576                 switch (xop) {
5577                 case 0x24: /* stf, store fpreg */
5578                     gen_address_mask(dc, cpu_addr);
5579                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5580                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5581                                         dc->mem_idx, MO_TEUL);
5582                     break;
5583                 case 0x25: /* stfsr, V9 stxfsr */
5584                     {
5585 #ifdef TARGET_SPARC64
5586                         gen_address_mask(dc, cpu_addr);
5587                         if (rd == 1) {
5588                             tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5589                             break;
5590                         }
5591 #endif
5592                         tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5593                     }
5594                     break;
5595                 case 0x26:
5596 #ifdef TARGET_SPARC64
5597                     /* V9 stqf, store quad fpreg */
5598                     CHECK_FPU_FEATURE(dc, FLOAT128);
5599                     gen_address_mask(dc, cpu_addr);
5600                     /* ??? While stqf only requires 4-byte alignment, it is
5601                        legal for the cpu to signal the unaligned exception.
5602                        The OS trap handler is then required to fix it up.
5603                        For qemu, this avoids having to probe the second page
5604                        before performing the first write.  */
5605                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5606                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5607                                         dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5608                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5609                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5610                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5611                                         dc->mem_idx, MO_TEQ);
5612                     break;
5613 #else /* !TARGET_SPARC64 */
5614                     /* stdfq, store floating point queue */
5615 #if defined(CONFIG_USER_ONLY)
5616                     goto illegal_insn;
5617 #else
5618                     if (!supervisor(dc))
5619                         goto priv_insn;
5620                     if (gen_trap_ifnofpu(dc)) {
5621                         goto jmp_insn;
5622                     }
5623                     goto nfq_insn;
5624 #endif
5625 #endif
5626                 case 0x27: /* stdf, store double fpreg */
5627                     gen_address_mask(dc, cpu_addr);
5628                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5629                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5630                                         MO_TEQ | MO_ALIGN_4);
5631                     break;
5632                 default:
5633                     goto illegal_insn;
5634                 }
5635             } else if (xop > 0x33 && xop < 0x3f) {
5636                 switch (xop) {
5637 #ifdef TARGET_SPARC64
5638                 case 0x34: /* V9 stfa */
5639                     if (gen_trap_ifnofpu(dc)) {
5640                         goto jmp_insn;
5641                     }
5642                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5643                     break;
5644                 case 0x36: /* V9 stqfa */
5645                     {
5646                         CHECK_FPU_FEATURE(dc, FLOAT128);
5647                         if (gen_trap_ifnofpu(dc)) {
5648                             goto jmp_insn;
5649                         }
5650                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5651                     }
5652                     break;
5653                 case 0x37: /* V9 stdfa */
5654                     if (gen_trap_ifnofpu(dc)) {
5655                         goto jmp_insn;
5656                     }
5657                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5658                     break;
5659                 case 0x3e: /* V9 casxa */
5660                     rs2 = GET_FIELD(insn, 27, 31);
5661                     cpu_src2 = gen_load_gpr(dc, rs2);
5662                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5663                     break;
5664 #else
5665                 case 0x34: /* stc */
5666                 case 0x35: /* stcsr */
5667                 case 0x36: /* stdcq */
5668                 case 0x37: /* stdc */
5669                     goto ncp_insn;
5670 #endif
5671 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5672                 case 0x3c: /* V9 or LEON3 casa */
5673 #ifndef TARGET_SPARC64
5674                     CHECK_IU_FEATURE(dc, CASA);
5675 #endif
5676                     rs2 = GET_FIELD(insn, 27, 31);
5677                     cpu_src2 = gen_load_gpr(dc, rs2);
5678                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5679                     break;
5680 #endif
5681                 default:
5682                     goto illegal_insn;
5683                 }
5684             } else {
5685                 goto illegal_insn;
5686             }
5687         }
5688         break;
5689     }
5690     /* default case for non jump instructions */
5691     if (dc->npc == DYNAMIC_PC) {
5692         dc->pc = DYNAMIC_PC;
5693         gen_op_next_insn();
5694     } else if (dc->npc == JUMP_PC) {
5695         /* we can do a static jump */
5696         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5697         dc->is_br = 1;
5698     } else {
5699         dc->pc = dc->npc;
5700         dc->npc = dc->npc + 4;
5701     }
5702  jmp_insn:
5703     goto egress;
5704  illegal_insn:
5705     gen_exception(dc, TT_ILL_INSN);
5706     goto egress;
5707  unimp_flush:
5708     gen_exception(dc, TT_UNIMP_FLUSH);
5709     goto egress;
5710 #if !defined(CONFIG_USER_ONLY)
5711  priv_insn:
5712     gen_exception(dc, TT_PRIV_INSN);
5713     goto egress;
5714 #endif
5715  nfpu_insn:
5716     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5717     goto egress;
5718 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5719  nfq_insn:
5720     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5721     goto egress;
5722 #endif
5723 #ifndef TARGET_SPARC64
5724  ncp_insn:
5725     gen_exception(dc, TT_NCP_INSN);
5726     goto egress;
5727 #endif
5728  egress:
5729     if (dc->n_t32 != 0) {
5730         int i;
5731         for (i = dc->n_t32 - 1; i >= 0; --i) {
5732             tcg_temp_free_i32(dc->t32[i]);
5733         }
5734         dc->n_t32 = 0;
5735     }
5736     if (dc->n_ttl != 0) {
5737         int i;
5738         for (i = dc->n_ttl - 1; i >= 0; --i) {
5739             tcg_temp_free(dc->ttl[i]);
5740         }
5741         dc->n_ttl = 0;
5742     }
5743 }
5744 
/* Translate guest code beginning at tb->pc into a TCG opcode stream.
   Disassembles one SPARC instruction at a time until control flow
   leaves the block, a page boundary or breakpoint is reached, or the
   per-TB instruction/op budget is exhausted.  */
void gen_intermediate_code(CPUSPARCState * env, TranslationBlock * tb)
{
    SPARCCPU *cpu = sparc_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start, last_pc;
    DisasContext dc1, *dc = &dc1;
    int num_insns;
    int max_insns;
    unsigned int insn;

    memset(dc, 0, sizeof(DisasContext));
    dc->tb = tb;
    pc_start = tb->pc;
    dc->pc = pc_start;
    last_pc = dc->pc;
    /* SPARC has delay slots; the next-PC is carried in tb->cs_base.  */
    dc->npc = (target_ulong) tb->cs_base;
    dc->cc_op = CC_OP_DYNAMIC;
    dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
    dc->def = env->def;
    dc->fpu_enabled = tb_fpu_enabled(tb->flags);
    dc->address_mask_32bit = tb_am_enabled(tb->flags);
    dc->singlestep = (cs->singlestep_enabled || singlestep);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    dc->asi = (tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif

    /* Clamp the requested instruction count to a sane budget.  */
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    do {
        /* Record pc/npc for restore_state_to_opc.  After a conditional
           branch, npc is encoded as the taken target ORed with JUMP_PC;
           the not-taken target is implicitly dc->pc + 4 (asserted to be
           jump_pc[1] below).  */
        if (dc->npc & JUMP_PC) {
            assert(dc->jump_pc[1] == dc->pc + 4);
            tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
        } else {
            tcg_gen_insn_start(dc->pc, dc->npc);
        }
        num_insns++;
        last_pc = dc->pc;

        /* On a debugger breakpoint, flush state and trap to the debug
           helper instead of translating the instruction.  */
        if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
            if (dc->pc != pc_start) {
                save_state(dc);
            }
            gen_helper_debug(cpu_env);
            tcg_gen_exit_tb(0);
            dc->is_br = 1;
            goto exit_gen_loop;
        }

        /* Bracket the final insn with io start/end when icount requires
           the last instruction of an IO TB to be handled specially.  */
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        insn = cpu_ldl_code(env, dc->pc);

        disas_sparc_insn(dc, insn);

        if (dc->is_br)
            break;
        /* if the next PC is different, we abort now */
        if (dc->pc != (last_pc + 4))
            break;
        /* if we reach a page boundary, we stop generation so that the
           PC of a TT_TFAULT exception is always in the right page */
        if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
            break;
        /* if single step mode, we generate only one instruction and
           generate an exception */
        if (dc->singlestep) {
            break;
        }
    } while (!tcg_op_buf_full() &&
             (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

 exit_gen_loop:
    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }
    if (!dc->is_br) {
        if (dc->pc != DYNAMIC_PC &&
            (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
        } else {
            /* pc and/or npc only known at run time: store what we can
               and return to the main loop.  */
            if (dc->pc != DYNAMIC_PC) {
                tcg_gen_movi_tl(cpu_pc, dc->pc);
            }
            save_npc(dc);
            tcg_gen_exit_tb(0);
        }
    }
    gen_tb_end(tb, num_insns);

    /* Book-keeping consumed by the TB cache and icount.  */
    tb->size = last_pc + 4 - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log_lock();
        qemu_log("--------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
        qemu_log("\n");
        qemu_log_unlock();
    }
#endif
}
5868 
5869 void gen_intermediate_code_init(CPUSPARCState *env)
5870 {
5871     static int inited;
5872     static const char gregnames[32][4] = {
5873         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5874         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5875         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5876         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5877     };
5878     static const char fregnames[32][4] = {
5879         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5880         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5881         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5882         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5883     };
5884 
5885     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5886 #ifdef TARGET_SPARC64
5887         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5888         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5889 #else
5890         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5891 #endif
5892         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5893         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5894     };
5895 
5896     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5897 #ifdef TARGET_SPARC64
5898         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5899         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5900         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5901         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5902           "hstick_cmpr" },
5903         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5904         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5905         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5906         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5907         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5908 #endif
5909         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5910         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5911         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5912         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5913         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5914         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5915         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5916         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5917 #ifndef CONFIG_USER_ONLY
5918         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5919 #endif
5920     };
5921 
5922     unsigned int i;
5923 
5924     /* init various static tables */
5925     if (inited) {
5926         return;
5927     }
5928     inited = 1;
5929 
5930     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5931     tcg_ctx.tcg_env = cpu_env;
5932 
5933     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5934                                          offsetof(CPUSPARCState, regwptr),
5935                                          "regwptr");
5936 
5937     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5938         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5939     }
5940 
5941     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5942         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5943     }
5944 
5945     TCGV_UNUSED(cpu_regs[0]);
5946     for (i = 1; i < 8; ++i) {
5947         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5948                                          offsetof(CPUSPARCState, gregs[i]),
5949                                          gregnames[i]);
5950     }
5951 
5952     for (i = 8; i < 32; ++i) {
5953         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5954                                          (i - 8) * sizeof(target_ulong),
5955                                          gregnames[i]);
5956     }
5957 
5958     for (i = 0; i < TARGET_DPREGS; i++) {
5959         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5960                                             offsetof(CPUSPARCState, fpr[i]),
5961                                             fregnames[i]);
5962     }
5963 }
5964 
5965 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5966                           target_ulong *data)
5967 {
5968     target_ulong pc = data[0];
5969     target_ulong npc = data[1];
5970 
5971     env->pc = pc;
5972     if (npc == DYNAMIC_PC) {
5973         /* dynamic NPC: already stored */
5974     } else if (npc & JUMP_PC) {
5975         /* jump PC: use 'cond' and the jump targets of the translation */
5976         if (env->cond) {
5977             env->npc = npc & ~3;
5978         } else {
5979             env->npc = pc + 4;
5980         }
5981     } else {
5982         env->npc = npc;
5983     }
5984 }
5985