xref: /openbmc/qemu/target/sparc/translate.c (revision b1311c4a)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 
30 #include "exec/helper-gen.h"
31 
32 #include "trace-tcg.h"
33 #include "exec/log.h"
34 #include "asi.h"
35 
36 
37 #define DEBUG_DISAS
38 
39 #define DYNAMIC_PC  1 /* dynamic pc value */
40 #define JUMP_PC     2 /* dynamic pc value which takes only two values
41                          according to jump_pc[T2] */
42 
43 /* global register indexes */
44 static TCGv_env cpu_env;
45 static TCGv_ptr cpu_regwptr;
46 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
47 static TCGv_i32 cpu_cc_op;
48 static TCGv_i32 cpu_psr;
49 static TCGv cpu_fsr, cpu_pc, cpu_npc;
50 static TCGv cpu_regs[32];
51 static TCGv cpu_y;
52 #ifndef CONFIG_USER_ONLY
53 static TCGv cpu_tbr;
54 #endif
55 static TCGv cpu_cond;
56 #ifdef TARGET_SPARC64
57 static TCGv_i32 cpu_xcc, cpu_fprs;
58 static TCGv cpu_gsr;
59 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
60 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
61 #else
62 static TCGv cpu_wim;
63 #endif
64 /* Floating point registers */
65 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
66 
67 #include "exec/gen-icount.h"
68 
/* Per-TB decoder state, threaded through all gen_* helpers.  */
typedef struct DisasContext {
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int is_br;          /* nonzero once the TB has ended (branch/exception) */
    int mem_idx;
    bool fpu_enabled;
    bool address_mask_32bit;
    bool singlestep;    /* disables direct TB chaining (see use_goto_tb) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    struct TranslationBlock *tb;
    sparc_def_t *def;
    TCGv_i32 t32[3]; /* per-insn i32 temporaries, recorded for later freeing */
    TCGv ttl[5];     /* per-insn target-long temporaries, likewise */
    int n_t32;
    int n_ttl;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;
#endif
} DisasContext;

/* A comparison prepared for brcond/movcond: COND applied to C1, C2.  */
typedef struct {
    TCGCond cond;
    bool is_bool;    /* NOTE(review): appears to mean c1 already holds 0/1 —
                        confirm at use sites */
    bool g1, g2;     /* c1/c2 are globals — must not be freed (free_compare) */
    TCGv c1, c2;
} DisasCompare;
104 
// This function uses non-native bit order
/* Extract bits FROM..TO of X with bit 0 = MSB, matching the SPARC
   manuals' instruction-format diagrams.  */
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the two extractors above.  */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Decode a double/quad FP register number: on sparc64 bit 0 of the
   encoding selects the upper bank of 32 registers.  */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

/* Widths to which trap numbers are masked.  */
#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
126 
/* Sign-extend the low LEN bits of X to a full int.
   LEN must be in 1..32.
   The left shift is done in unsigned arithmetic: shifting a set bit
   into or past the sign bit of a signed int is undefined behavior in
   C (C11 6.5.7), which the original `(x << len) >> len` invoked for
   any field with its top bit set.  The arithmetic right shift of a
   negative value is implementation-defined but is relied upon
   throughout QEMU.  */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;
    return (int32_t)((uint32_t)x << shift) >> shift;
}
132 
/* Bit 13 of a format-3 instruction word selects the immediate form.  */
#define IS_IMM (insn & (1<<13))
134 
135 static inline TCGv_i32 get_temp_i32(DisasContext *dc)
136 {
137     TCGv_i32 t;
138     assert(dc->n_t32 < ARRAY_SIZE(dc->t32));
139     dc->t32[dc->n_t32++] = t = tcg_temp_new_i32();
140     return t;
141 }
142 
143 static inline TCGv get_temp_tl(DisasContext *dc)
144 {
145     TCGv t;
146     assert(dc->n_ttl < ARRAY_SIZE(dc->ttl));
147     dc->ttl[dc->n_ttl++] = t = tcg_temp_new();
148     return t;
149 }
150 
/* Mark the FPRS dirty bit covering FP register RD (sparc64 only;
   no-op on 32-bit targets, which have no FPRS).  Bit 1 covers
   registers below 32, bit 2 the upper bank.  */
static inline void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
163 
/* floating point registers moves */
/* Read single-precision FP register SRC as an i32.  Each cpu_fpr[]
   element packs two singles: even register in the high half, odd in
   the low half.  */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
#if TCG_TARGET_REG_BITS == 32
    /* On a 32-bit host the i64 is a register pair; alias the wanted
       half directly instead of copying.  */
    if (src & 1) {
        return TCGV_LOW(cpu_fpr[src / 2]);
    } else {
        return TCGV_HIGH(cpu_fpr[src / 2]);
    }
#else
    TCGv_i32 ret = get_temp_i32(dc);
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
#endif
}
183 
/* Write i32 V to single-precision FP register DST, preserving the
   sibling half of the containing i64, and mark FPRS dirty.  */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
#if TCG_TARGET_REG_BITS == 32
    if (dst & 1) {
        tcg_gen_mov_i32(TCGV_LOW(cpu_fpr[dst / 2]), v);
    } else {
        tcg_gen_mov_i32(TCGV_HIGH(cpu_fpr[dst / 2]), v);
    }
#else
    /* NOTE(review): the cast relies on TCGv_i32/TCGv_i64 sharing a
       representation on 64-bit hosts; the deposit overwrites only
       the selected 32-bit half.  */
    TCGv_i64 t = (TCGv_i64)v;
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
#endif
    gen_update_fprs_dirty(dc, dst);
}
199 
/* Fresh i32 temporary to receive a single-precision result.  */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return get_temp_i32(dc);
}
204 
/* Backing i64 register for double-precision FP register SRC.  */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}

/* Store V into double-precision FP register DST; mark FPRS dirty.  */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}

/* Destination for a double result: the backing register itself
   (caller is expected to mark FPRS dirty when it stores).  */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
222 
/* Spill quad FP register SRC into the CPUSPARCState.qt0 scratch
   union, upper 64-bit half first.  */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* As above, into the qt1 scratch slot.  */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], cpu_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}

/* Copy the qt0 scratch slot back into quad FP register DST.  */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], cpu_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
246 
/* Store the two 64-bit halves V1 (upper) / V2 (lower) into quad FP
   register DST; mark FPRS dirty.  */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
256 
#ifdef TARGET_SPARC64
/* Upper 64-bit half of quad FP register SRC.  */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Lower 64-bit half of quad FP register SRC.  */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad FP register RS to RD; mark FPRS dirty.  */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif
280 
/* moves */
/* Privilege-level predicates; user-mode emulation is never
   privileged, and on sparc64 hypervisor implies supervisor.  */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* True when addresses must be masked to 32 bits (see
   gen_address_mask); always true for the 32-bit ABI.  */
#ifdef TARGET_SPARC64
#ifndef TARGET_ABI32
#define AM_CHECK(dc) ((dc)->address_mask_32bit)
#else
#define AM_CHECK(dc) (1)
#endif
#endif
303 
304 static inline void gen_address_mask(DisasContext *dc, TCGv addr)
305 {
306 #ifdef TARGET_SPARC64
307     if (AM_CHECK(dc))
308         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
309 #endif
310 }
311 
312 static inline TCGv gen_load_gpr(DisasContext *dc, int reg)
313 {
314     if (reg > 0) {
315         assert(reg < 32);
316         return cpu_regs[reg];
317     } else {
318         TCGv t = get_temp_tl(dc);
319         tcg_gen_movi_tl(t, 0);
320         return t;
321     }
322 }
323 
324 static inline void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
325 {
326     if (reg > 0) {
327         assert(reg < 32);
328         tcg_gen_mov_tl(cpu_regs[reg], v);
329     }
330 }
331 
332 static inline TCGv gen_dest_gpr(DisasContext *dc, int reg)
333 {
334     if (reg > 0) {
335         assert(reg < 32);
336         return cpu_regs[reg];
337     } else {
338         return get_temp_tl(dc);
339     }
340 }
341 
/* Whether a chained (goto_tb) jump to PC/NPC is allowed: not while
   single-stepping, and in system mode only when both targets lie on
   the same guest page as this TB.  */
static inline bool use_goto_tb(DisasContext *s, target_ulong pc,
                               target_ulong npc)
{
    if (unlikely(s->singlestep)) {
        return false;
    }

#ifndef CONFIG_USER_ONLY
    return (pc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK) &&
           (npc & TARGET_PAGE_MASK) == (s->tb->pc & TARGET_PAGE_MASK);
#else
    return true;
#endif
}
356 
/* Emit the exit of slot TB_NUM jumping to PC/NPC, chained via
   goto_tb when permitted, else as a plain (unchained) exit.  */
static inline void gen_goto_tb(DisasContext *s, int tb_num,
                               target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb((uintptr_t)s->tb + tb_num);
    } else {
        /* jump to another page: currently not optimized */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(0);
    }
}
373 
// XXX suboptimal
/* Extract one PSR flag (N/Z/V/C) from the i32 SRC into the
   target-long REG as a 0/1 value.  */
static inline void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static inline void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static inline void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static inline void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
398 
/* dst = src1 + src2, latching operands and result into cpu_cc_* for
   later lazy condition-code computation.  */
static inline void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
406 
/* Recover the 32-bit carry from a previous add recorded in cpu_cc_*:
   carry = (cc_dst < cc_src) unsigned.  Returns a fresh i32 temp that
   the caller must free.  */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
432 
/* Recover the 32-bit borrow from a previous subtract recorded in
   cpu_cc_*: borrow = (cc_src < cc_src2) unsigned.  Returns a fresh
   i32 temp that the caller must free.  */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

#if TARGET_LONG_BITS == 64
    tcg_temp_free_i32(cc_src1_32);
    tcg_temp_free_i32(cc_src2_32);
#endif

    return carry_32;
}
458 
/* Generate dst = src1 + src2 + icc.C (ADDX family), recovering the
   carry as cheaply as the current lazy-CC state (dc->cc_op) allows.
   When UPDATE_CC is set, also latch operands/result into cpu_cc_*
   and switch the CC state to CC_OP_ADDX.  */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
529 
/* dst = src1 - src2, latching operands and result into cpu_cc_* for
   later lazy condition-code computation.  */
static inline void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
537 
/* Generate dst = src1 - src2 - icc.C (SUBX family), recovering the
   borrow as cheaply as the current lazy-CC state (dc->cc_op) allows.
   When UPDATE_CC is set, also latch operands/result into cpu_cc_*
   and switch the CC state to CC_OP_SUBX.  */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            tcg_temp_free(carry);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, cpu_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

    tcg_temp_free_i32(carry_32);
#if TARGET_LONG_BITS == 64
    tcg_temp_free(carry);
#endif

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
608 
/* MULScc: one step of the V8 multiply-step algorithm.  Adds src2 (or
   0, when Y.lsb is clear) to src1 shifted right one bit with N^V
   shifted in at the top; shifts Y right one inserting src1's lsb;
   leaves operands in cpu_cc_* for ADD-style flag computation.  */
static inline void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_const_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);
    tcg_temp_free(zero);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);
    tcg_temp_free(r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);
    tcg_temp_free(t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
650 
/* 32x32 multiply.  On 32-bit targets DST gets the low 32 bits of the
   product; on 64-bit targets DST gets the full 64-bit product.  %y
   receives the high 32 bits either way.  SIGN_EXT selects signed vs
   unsigned treatment of the 32-bit operands.  */
static inline void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);

    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}

static inline void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

static inline void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
690 
/*
 * Integer condition evaluators: each computes its condition from the
 * PSR-format i32 SRC into DST as a 0/1 value.  The comment above
 * each helper gives the condition formula in terms of the N/Z/V/C
 * flags.
 */

// 1
static inline void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static inline void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}

// Z | (N ^ V)
static inline void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// N ^ V
static inline void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C | Z
static inline void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// C
static inline void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static inline void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static inline void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static inline void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}

// !Z
static inline void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static inline void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static inline void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static inline void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static inline void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static inline void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static inline void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
807 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/*
 * Floating-point condition evaluators: each computes its condition
 * from the FSR value SRC into DST as a 0/1 value.  fcc_offset
 * selects which fcc field of the FSR is examined.
 */
static inline void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

static inline void gen_mov_reg_FCC1(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

// !0: FCC0 | FCC1
static inline void gen_op_eval_fbne(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 2: FCC0 ^ FCC1
static inline void gen_op_eval_fblg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 1 or 3: FCC0
static inline void gen_op_eval_fbul(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static inline void gen_op_eval_fbl(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 2 or 3: FCC1
static inline void gen_op_eval_fbug(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static inline void gen_op_eval_fbg(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_temp_free(t0);
}

// 3: FCC0 & FCC1
static inline void gen_op_eval_fbu(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_temp_free(t0);
}

// 0: !(FCC0 | FCC1)
static inline void gen_op_eval_fbe(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 3: !(FCC0 ^ FCC1)
static inline void gen_op_eval_fbue(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 2: !FCC0
static inline void gen_op_eval_fbge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static inline void gen_op_eval_fbuge(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// 0 or 1: !FCC1
static inline void gen_op_eval_fble(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static inline void gen_op_eval_fbule(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}

// !3: !(FCC0 & FCC1)
static inline void gen_op_eval_fbo(TCGv dst, TCGv src,
                                    unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
    tcg_temp_free(t0);
}
973 
/* Two-way TB exit for a resolved conditional branch:
   r_cond != 0 -> pc1, else pc2.  */
static inline void gen_branch2(DisasContext *dc, target_ulong pc1,
                               target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
986 
/* Conditional branch with the annul bit set: the delay slot executes
   only when the branch is taken.  Taken -> slot at npc then pc1;
   not taken -> skip the slot (npc + 4).  Ends the TB.  */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->is_br = 1;
}
1001 
/* Conditional branch without annul: the delay slot always executes.
   With a static npc, defer the decision by recording both candidate
   npc values (taken: pc1, not taken: npc + 4) under the JUMP_PC
   marker; with a dynamic npc, resolve the new npc immediately via
   movcond.  */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (likely(npc != DYNAMIC_PC)) {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    } else {
        TCGv t, z;

        tcg_gen_mov_tl(cpu_pc, cpu_npc);

        tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
        t = tcg_const_tl(pc1);
        z = tcg_const_tl(0);
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, z, t, cpu_npc);
        tcg_temp_free(t);
        tcg_temp_free(z);

        dc->pc = DYNAMIC_PC;
    }
}
1026 
/* Materialize a deferred JUMP_PC choice into cpu_npc:
   npc = (cond != 0) ? jump_pc[0] : jump_pc[1].  */
static inline void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_const_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_const_tl(dc->jump_pc[1]);
    TCGv zero = tcg_const_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);

    tcg_temp_free(npc0);
    tcg_temp_free(npc1);
    tcg_temp_free(zero);
}
1039 
/* call this function before using the condition register as it may
   have been set for a jump */
static inline void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    }
}
1049 
/* Flush the (possibly deferred) npc into cpu_npc; leaves it alone
   when it is already dynamic.  */
static inline void save_npc(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC;
    } else if (dc->npc != DYNAMIC_PC) {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
1059 
/* Force lazily-tracked condition codes into PSR form before code
   that reads the flags directly.  */
static inline void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(cpu_env);
    }
}
1067 
/* Flush pc and npc to the CPU state, e.g. ahead of a possible
   exception.  */
static inline void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
1073 
/* Raise exception WHICH at the current pc/npc; ends the TB.  */
static void gen_exception(DisasContext *dc, int which)
{
    TCGv_i32 t;

    save_state(dc);
    t = tcg_const_i32(which);
    gen_helper_raise_exception(cpu_env, t);
    tcg_temp_free_i32(t);
    dc->is_br = 1;
}
1084 
/* Emit a runtime alignment check of ADDR against MASK; the helper
   traps on a set low bit.  */
static void gen_check_align(TCGv addr, int mask)
{
    TCGv_i32 r_mask = tcg_const_i32(mask);
    gen_helper_check_align(cpu_env, addr, r_mask);
    tcg_temp_free_i32(r_mask);
}
1091 
1092 static inline void gen_mov_pc_npc(DisasContext *dc)
1093 {
1094     if (dc->npc == JUMP_PC) {
1095         gen_generic_branch(dc);
1096         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1097         dc->pc = DYNAMIC_PC;
1098     } else if (dc->npc == DYNAMIC_PC) {
1099         tcg_gen_mov_tl(cpu_pc, cpu_npc);
1100         dc->pc = DYNAMIC_PC;
1101     } else {
1102         dc->pc = dc->npc;
1103     }
1104 }
1105 
/* Emit the default sequential advance: pc <- npc, npc <- npc + 4.  */
static inline void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1111 
/* Release the temporaries held by a DisasCompare.  Operands flagged
   g1/g2 are globals (e.g. cpu_cc_src) and must not be freed.  */
static void free_compare(DisasCompare *cmp)
{
    if (!cmp->g1) {
        tcg_temp_free(cmp->c1);
    }
    if (!cmp->g2) {
        tcg_temp_free(cmp->c2);
    }
}
1121 
1122 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1123                         DisasContext *dc)
1124 {
1125     static int subcc_cond[16] = {
1126         TCG_COND_NEVER,
1127         TCG_COND_EQ,
1128         TCG_COND_LE,
1129         TCG_COND_LT,
1130         TCG_COND_LEU,
1131         TCG_COND_LTU,
1132         -1, /* neg */
1133         -1, /* overflow */
1134         TCG_COND_ALWAYS,
1135         TCG_COND_NE,
1136         TCG_COND_GT,
1137         TCG_COND_GE,
1138         TCG_COND_GTU,
1139         TCG_COND_GEU,
1140         -1, /* pos */
1141         -1, /* no overflow */
1142     };
1143 
1144     static int logic_cond[16] = {
1145         TCG_COND_NEVER,
1146         TCG_COND_EQ,     /* eq:  Z */
1147         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1148         TCG_COND_LT,     /* lt:  N ^ V -> N */
1149         TCG_COND_EQ,     /* leu: C | Z -> Z */
1150         TCG_COND_NEVER,  /* ltu: C -> 0 */
1151         TCG_COND_LT,     /* neg: N */
1152         TCG_COND_NEVER,  /* vs:  V -> 0 */
1153         TCG_COND_ALWAYS,
1154         TCG_COND_NE,     /* ne:  !Z */
1155         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1156         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1157         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1158         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1159         TCG_COND_GE,     /* pos: !N */
1160         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1161     };
1162 
1163     TCGv_i32 r_src;
1164     TCGv r_dst;
1165 
1166 #ifdef TARGET_SPARC64
1167     if (xcc) {
1168         r_src = cpu_xcc;
1169     } else {
1170         r_src = cpu_psr;
1171     }
1172 #else
1173     r_src = cpu_psr;
1174 #endif
1175 
1176     switch (dc->cc_op) {
1177     case CC_OP_LOGIC:
1178         cmp->cond = logic_cond[cond];
1179     do_compare_dst_0:
1180         cmp->is_bool = false;
1181         cmp->g2 = false;
1182         cmp->c2 = tcg_const_tl(0);
1183 #ifdef TARGET_SPARC64
1184         if (!xcc) {
1185             cmp->g1 = false;
1186             cmp->c1 = tcg_temp_new();
1187             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1188             break;
1189         }
1190 #endif
1191         cmp->g1 = true;
1192         cmp->c1 = cpu_cc_dst;
1193         break;
1194 
1195     case CC_OP_SUB:
1196         switch (cond) {
1197         case 6:  /* neg */
1198         case 14: /* pos */
1199             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1200             goto do_compare_dst_0;
1201 
1202         case 7: /* overflow */
1203         case 15: /* !overflow */
1204             goto do_dynamic;
1205 
1206         default:
1207             cmp->cond = subcc_cond[cond];
1208             cmp->is_bool = false;
1209 #ifdef TARGET_SPARC64
1210             if (!xcc) {
1211                 /* Note that sign-extension works for unsigned compares as
1212                    long as both operands are sign-extended.  */
1213                 cmp->g1 = cmp->g2 = false;
1214                 cmp->c1 = tcg_temp_new();
1215                 cmp->c2 = tcg_temp_new();
1216                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1217                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1218                 break;
1219             }
1220 #endif
1221             cmp->g1 = cmp->g2 = true;
1222             cmp->c1 = cpu_cc_src;
1223             cmp->c2 = cpu_cc_src2;
1224             break;
1225         }
1226         break;
1227 
1228     default:
1229     do_dynamic:
1230         gen_helper_compute_psr(cpu_env);
1231         dc->cc_op = CC_OP_FLAGS;
1232         /* FALLTHRU */
1233 
1234     case CC_OP_FLAGS:
1235         /* We're going to generate a boolean result.  */
1236         cmp->cond = TCG_COND_NE;
1237         cmp->is_bool = true;
1238         cmp->g1 = cmp->g2 = false;
1239         cmp->c1 = r_dst = tcg_temp_new();
1240         cmp->c2 = tcg_const_tl(0);
1241 
1242         switch (cond) {
1243         case 0x0:
1244             gen_op_eval_bn(r_dst);
1245             break;
1246         case 0x1:
1247             gen_op_eval_be(r_dst, r_src);
1248             break;
1249         case 0x2:
1250             gen_op_eval_ble(r_dst, r_src);
1251             break;
1252         case 0x3:
1253             gen_op_eval_bl(r_dst, r_src);
1254             break;
1255         case 0x4:
1256             gen_op_eval_bleu(r_dst, r_src);
1257             break;
1258         case 0x5:
1259             gen_op_eval_bcs(r_dst, r_src);
1260             break;
1261         case 0x6:
1262             gen_op_eval_bneg(r_dst, r_src);
1263             break;
1264         case 0x7:
1265             gen_op_eval_bvs(r_dst, r_src);
1266             break;
1267         case 0x8:
1268             gen_op_eval_ba(r_dst);
1269             break;
1270         case 0x9:
1271             gen_op_eval_bne(r_dst, r_src);
1272             break;
1273         case 0xa:
1274             gen_op_eval_bg(r_dst, r_src);
1275             break;
1276         case 0xb:
1277             gen_op_eval_bge(r_dst, r_src);
1278             break;
1279         case 0xc:
1280             gen_op_eval_bgu(r_dst, r_src);
1281             break;
1282         case 0xd:
1283             gen_op_eval_bcc(r_dst, r_src);
1284             break;
1285         case 0xe:
1286             gen_op_eval_bpos(r_dst, r_src);
1287             break;
1288         case 0xf:
1289             gen_op_eval_bvc(r_dst, r_src);
1290             break;
1291         }
1292         break;
1293     }
1294 }
1295 
/* Describe in *cmp how to test floating-point condition field "cond"
   against %fsr condition-code set "cc" (fcc0..fcc3).  "offset" is the
   extra bit distance from fcc0 to the selected fcc field inside
   cpu_fsr, which the gen_op_eval_fb* helpers apply on top of the
   fcc0 position.  */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->g1 = cmp->g2 = false;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_const_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Evaluate the fcc field for the requested condition into r_dst.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1375 
1376 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1377                      DisasContext *dc)
1378 {
1379     DisasCompare cmp;
1380     gen_compare(&cmp, cc, cond, dc);
1381 
1382     /* The interface is to return a boolean in r_dst.  */
1383     if (cmp.is_bool) {
1384         tcg_gen_mov_tl(r_dst, cmp.c1);
1385     } else {
1386         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1387     }
1388 
1389     free_compare(&cmp);
1390 }
1391 
1392 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1393 {
1394     DisasCompare cmp;
1395     gen_fcompare(&cmp, cc, cond);
1396 
1397     /* The interface is to return a boolean in r_dst.  */
1398     if (cmp.is_bool) {
1399         tcg_gen_mov_tl(r_dst, cmp.c1);
1400     } else {
1401         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1402     }
1403 
1404     free_compare(&cmp);
1405 }
1406 
1407 #ifdef TARGET_SPARC64
/* TCG conditions for the 3-bit branch/move-on-register condition
   field, stored INVERTED; consumers undo this with tcg_invert_cond()
   (see gen_compare_reg).  Entries 0 and 4 are reserved encodings.  */
static const int gen_tcg_cond_reg[8] = {
    -1,
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    -1,
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1419 
1420 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1421 {
1422     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1423     cmp->is_bool = false;
1424     cmp->g1 = true;
1425     cmp->g2 = false;
1426     cmp->c1 = r_src;
1427     cmp->c2 = tcg_const_tl(0);
1428 }
1429 
/* Evaluate branch-on-register condition "cond" for r_src and leave a
   0/1 boolean in r_dst.  */
static inline void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
{
    DisasCompare cmp;
    gen_compare_reg(&cmp, cond, r_src);

    /* The interface is to return a boolean in r_dst.  */
    tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);

    free_compare(&cmp);
}
1440 #endif
1441 
/* Translate an integer conditional branch (Bicc/BPcc).  "offset" is
   the sign-extended byte displacement, "cc" selects icc vs xcc.  */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    /* cond: 4-bit condition field; a: the annul bit (insn bit 29).  */
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay-slot instruction.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: branch immediately, skipping the delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* True conditional branch: evaluate the condition into
           cpu_cond, then emit the (possibly annulling) branch.  */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1481 
/* Translate a floating-point conditional branch (FBfcc/FBPfcc).
   Same structure as do_branch, but the condition is evaluated from
   the %fsr fcc field selected by "cc".  */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    /* cond: 4-bit condition field; a: the annul bit (insn bit 29).  */
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

#ifdef TARGET_SPARC64
    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
#endif
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay-slot instruction.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: branch immediately, skipping the delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* True conditional branch on the FP condition codes.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1521 
1522 #ifdef TARGET_SPARC64
/* Translate a v9 branch-on-register-contents (BPr): compare r_reg
   against zero per the 3-bit condition and branch accordingly.  */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    /* cond: 3-bit rcond field; a: the annul bit (insn bit 29).  */
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking is in effect.  */
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1540 
1541 static inline void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1542 {
1543     switch (fccno) {
1544     case 0:
1545         gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
1546         break;
1547     case 1:
1548         gen_helper_fcmps_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1549         break;
1550     case 2:
1551         gen_helper_fcmps_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1552         break;
1553     case 3:
1554         gen_helper_fcmps_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1555         break;
1556     }
1557 }
1558 
1559 static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1560 {
1561     switch (fccno) {
1562     case 0:
1563         gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
1564         break;
1565     case 1:
1566         gen_helper_fcmpd_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1567         break;
1568     case 2:
1569         gen_helper_fcmpd_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1570         break;
1571     case 3:
1572         gen_helper_fcmpd_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1573         break;
1574     }
1575 }
1576 
1577 static inline void gen_op_fcmpq(int fccno)
1578 {
1579     switch (fccno) {
1580     case 0:
1581         gen_helper_fcmpq(cpu_fsr, cpu_env);
1582         break;
1583     case 1:
1584         gen_helper_fcmpq_fcc1(cpu_fsr, cpu_env);
1585         break;
1586     case 2:
1587         gen_helper_fcmpq_fcc2(cpu_fsr, cpu_env);
1588         break;
1589     case 3:
1590         gen_helper_fcmpq_fcc3(cpu_fsr, cpu_env);
1591         break;
1592     }
1593 }
1594 
1595 static inline void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1596 {
1597     switch (fccno) {
1598     case 0:
1599         gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
1600         break;
1601     case 1:
1602         gen_helper_fcmpes_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1603         break;
1604     case 2:
1605         gen_helper_fcmpes_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1606         break;
1607     case 3:
1608         gen_helper_fcmpes_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1609         break;
1610     }
1611 }
1612 
1613 static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1614 {
1615     switch (fccno) {
1616     case 0:
1617         gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
1618         break;
1619     case 1:
1620         gen_helper_fcmped_fcc1(cpu_fsr, cpu_env, r_rs1, r_rs2);
1621         break;
1622     case 2:
1623         gen_helper_fcmped_fcc2(cpu_fsr, cpu_env, r_rs1, r_rs2);
1624         break;
1625     case 3:
1626         gen_helper_fcmped_fcc3(cpu_fsr, cpu_env, r_rs1, r_rs2);
1627         break;
1628     }
1629 }
1630 
1631 static inline void gen_op_fcmpeq(int fccno)
1632 {
1633     switch (fccno) {
1634     case 0:
1635         gen_helper_fcmpeq(cpu_fsr, cpu_env);
1636         break;
1637     case 1:
1638         gen_helper_fcmpeq_fcc1(cpu_fsr, cpu_env);
1639         break;
1640     case 2:
1641         gen_helper_fcmpeq_fcc2(cpu_fsr, cpu_env);
1642         break;
1643     case 3:
1644         gen_helper_fcmpeq_fcc3(cpu_fsr, cpu_env);
1645         break;
1646     }
1647 }
1648 
1649 #else
1650 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static inline void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1655 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static inline void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1660 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static inline void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, cpu_env);
}
1665 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static inline void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1670 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static inline void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, cpu_env, r_rs1, r_rs2);
}
1675 
/* Pre-v9: only one fcc field exists, so fccno is ignored.  */
static inline void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, cpu_env);
}
1680 #endif
1681 
/* Raise an FP exception: replace the %fsr FTT field with fsr_flags
   and generate a TT_FP_EXCP trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1688 
/* If the FPU is disabled, raise TT_NFPU_INSN and return 1 so the
   caller abandons the insn; return 0 when the FPU is usable.  In
   user-only builds the FPU is always considered enabled.  */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1699 
/* Clear the %fsr FTT and current-exception (cexc) fields before a
   new FP operation.  */
static inline void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1704 
/* frd = gen(env, frs): unary float32 op via a helper that may raise
   IEEE exceptions; %fsr is checked afterwards.  */
static inline void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1718 
/* frd = gen(frs): unary float32 op that cannot raise IEEE exceptions
   ("ne" = no exceptions), so no %fsr check is emitted.  */
static inline void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}
1731 
/* frd = gen(env, frs1, frs2): binary float32 op via a helper that may
   raise IEEE exceptions; %fsr is checked afterwards.  */
static inline void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1746 
1747 #ifdef TARGET_SPARC64
/* frd = gen(frs1, frs2): binary float32 op with no IEEE exception
   check (used by VIS-style ops).  */
static inline void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1761 #endif
1762 
/* frd = gen(env, frs): unary float64 op via a helper that may raise
   IEEE exceptions; %fsr is checked afterwards.  */
static inline void gen_fop_DD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1776 
1777 #ifdef TARGET_SPARC64
/* frd = gen(frs): unary float64 op with no IEEE exception check.  */
static inline void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1790 #endif
1791 
/* frd = gen(env, frs1, frs2): binary float64 op via a helper that may
   raise IEEE exceptions; %fsr is checked afterwards.  */
static inline void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1806 
1807 #ifdef TARGET_SPARC64
/* frd = gen(frs1, frs2): binary float64 op with no IEEE exception
   check (used by VIS-style ops).  */
static inline void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                                  void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1821 
/* frd = gen(%gsr, frs1, frs2): binary float64 op that also takes the
   Graphics Status Register as an operand (e.g. faligndata).  */
static inline void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1835 
/* frd = gen(frd, frs1, frs2): float64 op that also reads the old
   value of the destination register (src0), no IEEE check.  */
static inline void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1850 #endif
1851 
/* Unary float128 op: operand staged in QT1, result in QT0; the helper
   may raise IEEE exceptions, so %fsr is checked afterwards.  */
static inline void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1863 
1864 #ifdef TARGET_SPARC64
/* Unary float128 op via QT1/QT0 with no IEEE exception check.  */
static inline void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1875 #endif
1876 
/* Binary float128 op: operands staged in QT0/QT1, result in QT0; the
   helper may raise IEEE exceptions, so %fsr is checked afterwards.  */
static inline void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1889 
/* frd (float64) = gen(env, frs1, frs2) with float32 sources; %fsr is
   checked for IEEE exceptions afterwards (e.g. fsmuld).  */
static inline void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1905 
/* float128 result (in QT0) = gen(env, frs1, frs2) with float64
   sources; %fsr is checked for IEEE exceptions afterwards.  */
static inline void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                               void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(cpu_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1920 
1921 #ifdef TARGET_SPARC64
/* frd (float64) = gen(env, frs) with a float32 source; %fsr is
   checked for IEEE exceptions afterwards.  */
static inline void gen_fop_DF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1936 #endif
1937 
/* frd (float64) = gen(env, frs) with a float32 source, but without
   the IEEE exception check (the helper still gets cpu_env).  */
static inline void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1951 
/* frd (float32) = gen(env, frs) with a float64 source; %fsr is
   checked for IEEE exceptions afterwards.  */
static inline void gen_fop_FD(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1966 
/* frd (float32) = gen(env) with a float128 source staged in QT1;
   %fsr is checked for IEEE exceptions afterwards.  */
static inline void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_F(dc, rd, dst);
}
1980 
/* frd (float64) = gen(env) with a float128 source staged in QT1;
   %fsr is checked for IEEE exceptions afterwards.  */
static inline void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, cpu_env);

    gen_store_fpr_D(dc, rd, dst);
}
1994 
/* float128 result (in QT0) = gen(env, frs) with a float32 source,
   no IEEE exception check.  */
static inline void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2007 
/* float128 result (in QT0) = gen(env, frs) with a float64 source,
   no IEEE exception check.  */
static inline void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                                 void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(cpu_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
2020 
/* SWAP: atomically exchange src with the memory word at addr (after
   address masking), leaving the old memory value in dst.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, TCGMemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop);
}
2027 
/* LDSTUB: atomically load the byte at addr into dst and store 0xff
   back to it (after address masking).  */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_const_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
    tcg_temp_free(m1);
}
2035 
2036 /* asi moves */
2037 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of a decoded ASI, telling the load/store translators
   which code-generation strategy to use.  */
typedef enum {
    GET_ASI_HELPER,  /* no direct implementation; go through a helper */
    GET_ASI_EXCP,    /* an exception has already been generated */
    GET_ASI_DIRECT,  /* plain load/store using mem_idx/memop */
    GET_ASI_DTWINX,  /* twin (128-bit) load/store ASIs */
    GET_ASI_BLOCK,   /* block-transfer ASIs (ASI_BLK_*) */
    GET_ASI_SHORT,   /* short FP load/store ASIs (ASI_FL8/FL16_*) */
    GET_ASI_BCOPY,   /* sparc32 block copy, sta access (ASI_M_BCOPY) */
    GET_ASI_BFILL,   /* sparc32 block fill, stda access (ASI_M_BFILL) */
} ASIType;
2048 
/* Fully decoded ASI attributes for one memory-access instruction.  */
typedef struct {
    ASIType type;
    int asi;        /* raw 8-bit ASI number */
    int mem_idx;    /* MMU index to use for the access */
    TCGMemOp memop; /* access size and endianness */
} DisasASI;
2055 
2056 static DisasASI get_asi(DisasContext *dc, int insn, TCGMemOp memop)
2057 {
2058     int asi = GET_FIELD(insn, 19, 26);
2059     ASIType type = GET_ASI_HELPER;
2060     int mem_idx = dc->mem_idx;
2061 
2062 #ifndef TARGET_SPARC64
2063     /* Before v9, all asis are immediate and privileged.  */
2064     if (IS_IMM) {
2065         gen_exception(dc, TT_ILL_INSN);
2066         type = GET_ASI_EXCP;
2067     } else if (supervisor(dc)
2068                /* Note that LEON accepts ASI_USERDATA in user mode, for
2069                   use with CASA.  Also note that previous versions of
2070                   QEMU allowed (and old versions of gcc emitted) ASI_P
2071                   for LEON, which is incorrect.  */
2072                || (asi == ASI_USERDATA
2073                    && (dc->def->features & CPU_FEATURE_CASA))) {
2074         switch (asi) {
2075         case ASI_USERDATA:   /* User data access */
2076             mem_idx = MMU_USER_IDX;
2077             type = GET_ASI_DIRECT;
2078             break;
2079         case ASI_KERNELDATA: /* Supervisor data access */
2080             mem_idx = MMU_KERNEL_IDX;
2081             type = GET_ASI_DIRECT;
2082             break;
2083         case ASI_M_BYPASS:    /* MMU passthrough */
2084         case ASI_LEON_BYPASS: /* LEON MMU passthrough */
2085             mem_idx = MMU_PHYS_IDX;
2086             type = GET_ASI_DIRECT;
2087             break;
2088         case ASI_M_BCOPY: /* Block copy, sta access */
2089             mem_idx = MMU_KERNEL_IDX;
2090             type = GET_ASI_BCOPY;
2091             break;
2092         case ASI_M_BFILL: /* Block fill, stda access */
2093             mem_idx = MMU_KERNEL_IDX;
2094             type = GET_ASI_BFILL;
2095             break;
2096         }
2097     } else {
2098         gen_exception(dc, TT_PRIV_INSN);
2099         type = GET_ASI_EXCP;
2100     }
2101 #else
2102     if (IS_IMM) {
2103         asi = dc->asi;
2104     }
2105     /* With v9, all asis below 0x80 are privileged.  */
2106     /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
2107        down that bit into DisasContext.  For the moment that's ok,
2108        since the direct implementations below doesn't have any ASIs
2109        in the restricted [0x30, 0x7f] range, and the check will be
2110        done properly in the helper.  */
2111     if (!supervisor(dc) && asi < 0x80) {
2112         gen_exception(dc, TT_PRIV_ACT);
2113         type = GET_ASI_EXCP;
2114     } else {
2115         switch (asi) {
2116         case ASI_REAL:      /* Bypass */
2117         case ASI_REAL_IO:   /* Bypass, non-cacheable */
2118         case ASI_REAL_L:    /* Bypass LE */
2119         case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
2120         case ASI_TWINX_REAL:   /* Real address, twinx */
2121         case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
2122         case ASI_QUAD_LDD_PHYS:
2123         case ASI_QUAD_LDD_PHYS_L:
2124             mem_idx = MMU_PHYS_IDX;
2125             break;
2126         case ASI_N:  /* Nucleus */
2127         case ASI_NL: /* Nucleus LE */
2128         case ASI_TWINX_N:
2129         case ASI_TWINX_NL:
2130         case ASI_NUCLEUS_QUAD_LDD:
2131         case ASI_NUCLEUS_QUAD_LDD_L:
2132             if (hypervisor(dc)) {
2133                 mem_idx = MMU_PHYS_IDX;
2134             } else {
2135                 mem_idx = MMU_NUCLEUS_IDX;
2136             }
2137             break;
2138         case ASI_AIUP:  /* As if user primary */
2139         case ASI_AIUPL: /* As if user primary LE */
2140         case ASI_TWINX_AIUP:
2141         case ASI_TWINX_AIUP_L:
2142         case ASI_BLK_AIUP_4V:
2143         case ASI_BLK_AIUP_L_4V:
2144         case ASI_BLK_AIUP:
2145         case ASI_BLK_AIUPL:
2146             mem_idx = MMU_USER_IDX;
2147             break;
2148         case ASI_AIUS:  /* As if user secondary */
2149         case ASI_AIUSL: /* As if user secondary LE */
2150         case ASI_TWINX_AIUS:
2151         case ASI_TWINX_AIUS_L:
2152         case ASI_BLK_AIUS_4V:
2153         case ASI_BLK_AIUS_L_4V:
2154         case ASI_BLK_AIUS:
2155         case ASI_BLK_AIUSL:
2156             mem_idx = MMU_USER_SECONDARY_IDX;
2157             break;
2158         case ASI_S:  /* Secondary */
2159         case ASI_SL: /* Secondary LE */
2160         case ASI_TWINX_S:
2161         case ASI_TWINX_SL:
2162         case ASI_BLK_COMMIT_S:
2163         case ASI_BLK_S:
2164         case ASI_BLK_SL:
2165         case ASI_FL8_S:
2166         case ASI_FL8_SL:
2167         case ASI_FL16_S:
2168         case ASI_FL16_SL:
2169             if (mem_idx == MMU_USER_IDX) {
2170                 mem_idx = MMU_USER_SECONDARY_IDX;
2171             } else if (mem_idx == MMU_KERNEL_IDX) {
2172                 mem_idx = MMU_KERNEL_SECONDARY_IDX;
2173             }
2174             break;
2175         case ASI_P:  /* Primary */
2176         case ASI_PL: /* Primary LE */
2177         case ASI_TWINX_P:
2178         case ASI_TWINX_PL:
2179         case ASI_BLK_COMMIT_P:
2180         case ASI_BLK_P:
2181         case ASI_BLK_PL:
2182         case ASI_FL8_P:
2183         case ASI_FL8_PL:
2184         case ASI_FL16_P:
2185         case ASI_FL16_PL:
2186             break;
2187         }
2188         switch (asi) {
2189         case ASI_REAL:
2190         case ASI_REAL_IO:
2191         case ASI_REAL_L:
2192         case ASI_REAL_IO_L:
2193         case ASI_N:
2194         case ASI_NL:
2195         case ASI_AIUP:
2196         case ASI_AIUPL:
2197         case ASI_AIUS:
2198         case ASI_AIUSL:
2199         case ASI_S:
2200         case ASI_SL:
2201         case ASI_P:
2202         case ASI_PL:
2203             type = GET_ASI_DIRECT;
2204             break;
2205         case ASI_TWINX_REAL:
2206         case ASI_TWINX_REAL_L:
2207         case ASI_TWINX_N:
2208         case ASI_TWINX_NL:
2209         case ASI_TWINX_AIUP:
2210         case ASI_TWINX_AIUP_L:
2211         case ASI_TWINX_AIUS:
2212         case ASI_TWINX_AIUS_L:
2213         case ASI_TWINX_P:
2214         case ASI_TWINX_PL:
2215         case ASI_TWINX_S:
2216         case ASI_TWINX_SL:
2217         case ASI_QUAD_LDD_PHYS:
2218         case ASI_QUAD_LDD_PHYS_L:
2219         case ASI_NUCLEUS_QUAD_LDD:
2220         case ASI_NUCLEUS_QUAD_LDD_L:
2221             type = GET_ASI_DTWINX;
2222             break;
2223         case ASI_BLK_COMMIT_P:
2224         case ASI_BLK_COMMIT_S:
2225         case ASI_BLK_AIUP_4V:
2226         case ASI_BLK_AIUP_L_4V:
2227         case ASI_BLK_AIUP:
2228         case ASI_BLK_AIUPL:
2229         case ASI_BLK_AIUS_4V:
2230         case ASI_BLK_AIUS_L_4V:
2231         case ASI_BLK_AIUS:
2232         case ASI_BLK_AIUSL:
2233         case ASI_BLK_S:
2234         case ASI_BLK_SL:
2235         case ASI_BLK_P:
2236         case ASI_BLK_PL:
2237             type = GET_ASI_BLOCK;
2238             break;
2239         case ASI_FL8_S:
2240         case ASI_FL8_SL:
2241         case ASI_FL8_P:
2242         case ASI_FL8_PL:
2243             memop = MO_UB;
2244             type = GET_ASI_SHORT;
2245             break;
2246         case ASI_FL16_S:
2247         case ASI_FL16_SL:
2248         case ASI_FL16_P:
2249         case ASI_FL16_PL:
2250             memop = MO_TEUW;
2251             type = GET_ASI_SHORT;
2252             break;
2253         }
2254         /* The little-endian asis all have bit 3 set.  */
2255         if (asi & 8) {
2256             memop ^= MO_BSWAP;
2257         }
2258     }
2259 #endif
2260 
2261     return (DisasASI){ type, asi, mem_idx, memop };
2262 }
2263 
/* Generate code for an integer load alternate (LDA/LDUBA/LDXA/...):
   load a value of size/endianness MEMOP from ADDR, in the address space
   selected by the instruction's ASI, into DST.  */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps onto an ordinary softmmu access.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Unusual ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, cpu_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; truncate down to the
                   32-bit target register width.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2301 
/* Generate code for an integer store alternate (STA/STBA/STXA/...):
   store SRC with size/endianness MEMOP to ADDR, in the address space
   selected by the instruction's ASI.  */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, TCGMemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; emit nothing.  */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
        /* fall through */
#endif
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_const_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            /* Align both addresses down to 4 bytes, then copy eight
               32-bit words.  */
            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }

            tcg_temp_free(saddr);
            tcg_temp_free(daddr);
            tcg_temp_free(four);
            tcg_temp_free_i32(tmp);
        }
        break;
#endif
    default:
        /* Unusual ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(memop & MO_SIZE);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(cpu_env, addr, src, r_asi, r_mop);
#else
            {
                /* Widen the 32-bit source to the 64 bits the helper
                   expects.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
                tcg_temp_free_i64(t64);
            }
#endif
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2385 
2386 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2387                          TCGv addr, int insn)
2388 {
2389     DisasASI da = get_asi(dc, insn, MO_TEUL);
2390 
2391     switch (da.type) {
2392     case GET_ASI_EXCP:
2393         break;
2394     case GET_ASI_DIRECT:
2395         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2396         break;
2397     default:
2398         /* ??? Should be DAE_invalid_asi.  */
2399         gen_exception(dc, TT_DATA_ACCESS);
2400         break;
2401     }
2402 }
2403 
2404 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2405                         int insn, int rd)
2406 {
2407     DisasASI da = get_asi(dc, insn, MO_TEUL);
2408     TCGv oldv;
2409 
2410     switch (da.type) {
2411     case GET_ASI_EXCP:
2412         return;
2413     case GET_ASI_DIRECT:
2414         oldv = tcg_temp_new();
2415         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2416                                   da.mem_idx, da.memop);
2417         gen_store_gpr(dc, rd, oldv);
2418         tcg_temp_free(oldv);
2419         break;
2420     default:
2421         /* ??? Should be DAE_invalid_asi.  */
2422         gen_exception(dc, TT_DATA_ACCESS);
2423         break;
2424     }
2425 }
2426 
/* Generate code for LDSTUBA: atomically load the byte at ADDR into DST
   and store 0xff back, in the ASI-selected address space.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->tb) & CF_PARALLEL) {
            /* The helper-based load+store below is not atomic, so in a
               parallel context defer to the exclusive slow path.  */
            gen_helper_exit_atomic(cpu_env);
        } else {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            /* Load the old byte value ... */
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);

            /* ... then store 0xff over it.  */
            s64 = tcg_const_i64(0xff);
            gen_helper_st_asi(cpu_env, addr, s64, r_asi, r_mop);
            tcg_temp_free_i64(s64);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);

            tcg_gen_trunc_i64_tl(dst, t64);
            tcg_temp_free_i64(t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2466 #endif
2467 
2468 #ifdef TARGET_SPARC64
/* Generate code for a floating-point load alternate (LDFA/LDDFA/LDQFA):
   load SIZE bytes (4, 8 or 16) from ADDR, in the ASI-selected address
   space, into FP register RD.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the first half into a temporary so that cpu_fpr[rd/2]
               is only committed after the second load has completed.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            tcg_temp_free_i64(d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* Load 64 bytes into eight consecutive FP doubles.  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                /* 64-bit helper result, truncated to the 32-bit reg.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                tcg_temp_free_i64(d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], cpu_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As in GET_ASI_DIRECT: stage the first half in a temp
                   until the second helper call has completed.  */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, cpu_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], cpu_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                tcg_temp_free_i64(d64);
                break;
            default:
                g_assert_not_reached();
            }
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }
}
2582 
/* Generate code for a floating-point store alternate (STFA/STDFA/STQFA):
   store SIZE bytes (4, 8 or 16) from FP register RD to ADDR, in the
   ASI-selected address space.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            TCGMemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_const_tl(8);
            /* Store eight consecutive FP doubles (64 bytes).  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
            tcg_temp_free(eight);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx, da.memop);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2665 
/* Generate code for v9 LDDA: load 8 bytes from ADDR and split them into
   the register pair RD (high 32 bits) / RD+1 (low 32 bits).  The TWINX
   ASIs instead load two full 64-bit registers from a 16-byte-aligned
   address.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; skip the writeback.  */
        return;

    case GET_ASI_DTWINX:
        /* Two full 64-bit values; only the first access is
           alignment-checked.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i32(r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
            tcg_temp_free_i64(tmp);
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2731 
/* Generate code for v9 STDA: store the register pair HI (from RD) and
   RD+1 as 8 bytes at ADDR.  The TWINX ASIs instead store two full
   64-bit registers to a 16-byte-aligned address.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; emit nothing.  */
        break;

    case GET_ASI_DTWINX:
        /* Two full 64-bit values; only the first access is
           alignment-checked.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
            tcg_temp_free_i64(t64);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
            tcg_temp_free_i64(t64);
        }
        break;
    }
}
2791 
2792 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2793                          int insn, int rd)
2794 {
2795     DisasASI da = get_asi(dc, insn, MO_TEQ);
2796     TCGv oldv;
2797 
2798     switch (da.type) {
2799     case GET_ASI_EXCP:
2800         return;
2801     case GET_ASI_DIRECT:
2802         oldv = tcg_temp_new();
2803         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2804                                   da.mem_idx, da.memop);
2805         gen_store_gpr(dc, rd, oldv);
2806         tcg_temp_free(oldv);
2807         break;
2808     default:
2809         /* ??? Should be DAE_invalid_asi.  */
2810         gen_exception(dc, TT_DATA_ACCESS);
2811         break;
2812     }
2813 }
2814 
2815 #elif !defined(CONFIG_USER_ONLY)
/* Generate code for pre-v9 LDDA: load 8 bytes from ADDR into the
   even/odd register pair RD / RD+1, honoring the instruction's ASI.  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Exception already generated; release the temp and skip the
           writeback.  */
        tcg_temp_free_i64(t64);
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop);
        break;
    default:
        /* Unusual ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_ld_asi(t64, cpu_env, addr, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    /* Split the 64-bit value: high word to RD, low word to RD+1.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    tcg_temp_free_i64(t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2853 
/* Generate code for pre-v9 STDA: store the even/odd register pair
   HI (from RD) / RD+1 as 8 bytes at ADDR, honoring the ASI.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Assemble the pair into one 64-bit value: HI is the high word.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already generated the exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_const_tl(8);
            int i;

            /* Align down to 8 bytes and replicate the doubleword four
               times.  */
            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }

            tcg_temp_free(d_addr);
            tcg_temp_free(eight);
        }
        break;
    default:
        /* Unusual ASIs go through the out-of-line helper.  */
        {
            TCGv_i32 r_asi = tcg_const_i32(da.asi);
            TCGv_i32 r_mop = tcg_const_i32(MO_Q);

            save_state(dc);
            gen_helper_st_asi(cpu_env, addr, t64, r_asi, r_mop);
            tcg_temp_free_i32(r_mop);
            tcg_temp_free_i32(r_asi);
        }
        break;
    }

    tcg_temp_free_i64(t64);
}
2906 #endif
2907 
2908 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2909 {
2910     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2911     return gen_load_gpr(dc, rs1);
2912 }
2913 
2914 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2915 {
2916     if (IS_IMM) { /* immediate */
2917         target_long simm = GET_FIELDs(insn, 19, 31);
2918         TCGv t = get_temp_tl(dc);
2919         tcg_gen_movi_tl(t, simm);
2920         return t;
2921     } else {      /* register */
2922         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2923         return gen_load_gpr(dc, rs2);
2924     }
2925 }
2926 
2927 #ifdef TARGET_SPARC64
2928 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2929 {
2930     TCGv_i32 c32, zero, dst, s1, s2;
2931 
2932     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2933        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2934        the later.  */
2935     c32 = tcg_temp_new_i32();
2936     if (cmp->is_bool) {
2937         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2938     } else {
2939         TCGv_i64 c64 = tcg_temp_new_i64();
2940         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2941         tcg_gen_extrl_i64_i32(c32, c64);
2942         tcg_temp_free_i64(c64);
2943     }
2944 
2945     s1 = gen_load_fpr_F(dc, rs);
2946     s2 = gen_load_fpr_F(dc, rd);
2947     dst = gen_dest_fpr_F(dc);
2948     zero = tcg_const_i32(0);
2949 
2950     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2951 
2952     tcg_temp_free_i32(c32);
2953     tcg_temp_free_i32(zero);
2954     gen_store_fpr_F(dc, rd, dst);
2955 }
2956 
2957 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2958 {
2959     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2960     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2961                         gen_load_fpr_D(dc, rs),
2962                         gen_load_fpr_D(dc, rd));
2963     gen_store_fpr_D(dc, rd, dst);
2964 }
2965 
2966 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2967 {
2968     int qd = QFPREG(rd);
2969     int qs = QFPREG(rs);
2970 
2971     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2972                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2973     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2974                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2975 
2976     gen_update_fprs_dirty(dc, qd);
2977 }
2978 
2979 #ifndef CONFIG_USER_ONLY
/* Compute a pointer to the current trap state, env->ts[env->tl & MAXTL_MASK],
   and leave it in R_TSPTR.  Note that the cpu_env parameter shadows the
   file-scope global of the same name.  */
static inline void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env cpu_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, cpu_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, cpu_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        /* Widen the 32-bit byte offset to pointer width before adding.  */
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
        tcg_temp_free_ptr(r_tl_tmp);
    }

    tcg_temp_free_i32(r_tl);
}
3004 #endif
3005 
/* Generate code for the VIS EDGE8/16/32 instruction family ("l" and "cc"
   variants included): produce the edge mask for a partial store given the
   two addresses s1 and s2.  NOTE: s1 and s2 are masked in place with the
   addressing-mode mask below, clobbering the temporaries passed in.  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2, t1, t2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    /* The "cc" variants also set the integer condition codes exactly as
       a subcc of the two addresses would.  */
    if (cc) {
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;    /* index by all three low address bits */
        shift = 3;      /* 8 table bits per index */
        omask = 0xff;   /* 8-bit byte mask out */
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;    /* halfword ops ignore address bit 0 */
        shift = 1;      /* 4 table bits per (even) index */
        omask = 0xf;    /* 4-bit halfword mask out */
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;    /* word ops use only address bit 2 */
        shift = 0;      /* 2 table bits per index (0 or 4) */
        omask = 0x3;    /* 2-bit word mask out */
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = table index derived from the low address bits.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    /* Look both indexes up in their respective tables.  */
    t1 = tcg_const_tl(tabl);
    t2 = tcg_const_tl(tabr);
    tcg_gen_shr_tl(lo1, t1, lo1);
    tcg_gen_shr_tl(lo2, t2, lo2);
    tcg_gen_andi_tl(dst, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the aligned addresses; in 32-bit address mode (AM set)
       only the low 32 bits participate.  This clobbers s1 and s2.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* We want to compute
        dst = (s1 == s2 ? lo1 : lo1 & lo2).
       We've already done dst = lo1, so this reduces to
        dst &= (s1 == s2 ? -1 : lo2)
       Which we perform by
        lo2 |= -(s1 == s2)
        dst &= lo2
    */
    tcg_gen_setcond_tl(TCG_COND_EQ, t1, s1, s2);
    tcg_gen_neg_tl(t1, t1);
    tcg_gen_or_tl(lo2, lo2, t1);
    tcg_gen_and_tl(dst, dst, lo2);

    tcg_temp_free(lo1);
    tcg_temp_free(lo2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
3108 
3109 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
3110 {
3111     TCGv tmp = tcg_temp_new();
3112 
3113     tcg_gen_add_tl(tmp, s1, s2);
3114     tcg_gen_andi_tl(dst, tmp, -8);
3115     if (left) {
3116         tcg_gen_neg_tl(tmp, tmp);
3117     }
3118     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
3119 
3120     tcg_temp_free(tmp);
3121 }
3122 
3123 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
3124 {
3125     TCGv t1, t2, shift;
3126 
3127     t1 = tcg_temp_new();
3128     t2 = tcg_temp_new();
3129     shift = tcg_temp_new();
3130 
3131     tcg_gen_andi_tl(shift, gsr, 7);
3132     tcg_gen_shli_tl(shift, shift, 3);
3133     tcg_gen_shl_tl(t1, s1, shift);
3134 
3135     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
3136        shift of (up to 63) followed by a constant shift of 1.  */
3137     tcg_gen_xori_tl(shift, shift, 63);
3138     tcg_gen_shr_tl(t2, s2, shift);
3139     tcg_gen_shri_tl(t2, t2, 1);
3140 
3141     tcg_gen_or_tl(dst, t1, t2);
3142 
3143     tcg_temp_free(t1);
3144     tcg_temp_free(t2);
3145     tcg_temp_free(shift);
3146 }
3147 #endif
3148 
/* Bail out of decoding the current insn when the CPU model lacks the
   named integer-unit feature.  Expands to a goto into the enclosing
   decoder's "illegal_insn" label, so it is only usable inside
   disas_sparc_insn.  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
/* Likewise for FPU features; jumps to the "nfpu_insn" label instead.  */
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3155 
3156 /* before an instruction, dc->pc must be static */
3157 static void disas_sparc_insn(DisasContext * dc, unsigned int insn)
3158 {
3159     unsigned int opc, rs1, rs2, rd;
3160     TCGv cpu_src1, cpu_src2;
3161     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3162     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3163     target_long simm;
3164 
3165     opc = GET_FIELD(insn, 0, 1);
3166     rd = GET_FIELD(insn, 2, 6);
3167 
3168     switch (opc) {
3169     case 0:                     /* branches/sethi */
3170         {
3171             unsigned int xop = GET_FIELD(insn, 7, 9);
3172             int32_t target;
3173             switch (xop) {
3174 #ifdef TARGET_SPARC64
3175             case 0x1:           /* V9 BPcc */
3176                 {
3177                     int cc;
3178 
3179                     target = GET_FIELD_SP(insn, 0, 18);
3180                     target = sign_extend(target, 19);
3181                     target <<= 2;
3182                     cc = GET_FIELD_SP(insn, 20, 21);
3183                     if (cc == 0)
3184                         do_branch(dc, target, insn, 0);
3185                     else if (cc == 2)
3186                         do_branch(dc, target, insn, 1);
3187                     else
3188                         goto illegal_insn;
3189                     goto jmp_insn;
3190                 }
3191             case 0x3:           /* V9 BPr */
3192                 {
3193                     target = GET_FIELD_SP(insn, 0, 13) |
3194                         (GET_FIELD_SP(insn, 20, 21) << 14);
3195                     target = sign_extend(target, 16);
3196                     target <<= 2;
3197                     cpu_src1 = get_src1(dc, insn);
3198                     do_branch_reg(dc, target, insn, cpu_src1);
3199                     goto jmp_insn;
3200                 }
3201             case 0x5:           /* V9 FBPcc */
3202                 {
3203                     int cc = GET_FIELD_SP(insn, 20, 21);
3204                     if (gen_trap_ifnofpu(dc)) {
3205                         goto jmp_insn;
3206                     }
3207                     target = GET_FIELD_SP(insn, 0, 18);
3208                     target = sign_extend(target, 19);
3209                     target <<= 2;
3210                     do_fbranch(dc, target, insn, cc);
3211                     goto jmp_insn;
3212                 }
3213 #else
3214             case 0x7:           /* CBN+x */
3215                 {
3216                     goto ncp_insn;
3217                 }
3218 #endif
3219             case 0x2:           /* BN+x */
3220                 {
3221                     target = GET_FIELD(insn, 10, 31);
3222                     target = sign_extend(target, 22);
3223                     target <<= 2;
3224                     do_branch(dc, target, insn, 0);
3225                     goto jmp_insn;
3226                 }
3227             case 0x6:           /* FBN+x */
3228                 {
3229                     if (gen_trap_ifnofpu(dc)) {
3230                         goto jmp_insn;
3231                     }
3232                     target = GET_FIELD(insn, 10, 31);
3233                     target = sign_extend(target, 22);
3234                     target <<= 2;
3235                     do_fbranch(dc, target, insn, 0);
3236                     goto jmp_insn;
3237                 }
3238             case 0x4:           /* SETHI */
3239                 /* Special-case %g0 because that's the canonical nop.  */
3240                 if (rd) {
3241                     uint32_t value = GET_FIELD(insn, 10, 31);
3242                     TCGv t = gen_dest_gpr(dc, rd);
3243                     tcg_gen_movi_tl(t, value << 10);
3244                     gen_store_gpr(dc, rd, t);
3245                 }
3246                 break;
3247             case 0x0:           /* UNIMPL */
3248             default:
3249                 goto illegal_insn;
3250             }
3251             break;
3252         }
3253         break;
3254     case 1:                     /*CALL*/
3255         {
3256             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3257             TCGv o7 = gen_dest_gpr(dc, 15);
3258 
3259             tcg_gen_movi_tl(o7, dc->pc);
3260             gen_store_gpr(dc, 15, o7);
3261             target += dc->pc;
3262             gen_mov_pc_npc(dc);
3263 #ifdef TARGET_SPARC64
3264             if (unlikely(AM_CHECK(dc))) {
3265                 target &= 0xffffffffULL;
3266             }
3267 #endif
3268             dc->npc = target;
3269         }
3270         goto jmp_insn;
3271     case 2:                     /* FPU & Logical Operations */
3272         {
3273             unsigned int xop = GET_FIELD(insn, 7, 12);
3274             TCGv cpu_dst = get_temp_tl(dc);
3275             TCGv cpu_tmp0;
3276 
3277             if (xop == 0x3a) {  /* generate trap */
3278                 int cond = GET_FIELD(insn, 3, 6);
3279                 TCGv_i32 trap;
3280                 TCGLabel *l1 = NULL;
3281                 int mask;
3282 
3283                 if (cond == 0) {
3284                     /* Trap never.  */
3285                     break;
3286                 }
3287 
3288                 save_state(dc);
3289 
3290                 if (cond != 8) {
3291                     /* Conditional trap.  */
3292                     DisasCompare cmp;
3293 #ifdef TARGET_SPARC64
3294                     /* V9 icc/xcc */
3295                     int cc = GET_FIELD_SP(insn, 11, 12);
3296                     if (cc == 0) {
3297                         gen_compare(&cmp, 0, cond, dc);
3298                     } else if (cc == 2) {
3299                         gen_compare(&cmp, 1, cond, dc);
3300                     } else {
3301                         goto illegal_insn;
3302                     }
3303 #else
3304                     gen_compare(&cmp, 0, cond, dc);
3305 #endif
3306                     l1 = gen_new_label();
3307                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3308                                       cmp.c1, cmp.c2, l1);
3309                     free_compare(&cmp);
3310                 }
3311 
3312                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3313                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3314 
3315                 /* Don't use the normal temporaries, as they may well have
3316                    gone out of scope with the branch above.  While we're
3317                    doing that we might as well pre-truncate to 32-bit.  */
3318                 trap = tcg_temp_new_i32();
3319 
3320                 rs1 = GET_FIELD_SP(insn, 14, 18);
3321                 if (IS_IMM) {
3322                     rs2 = GET_FIELD_SP(insn, 0, 7);
3323                     if (rs1 == 0) {
3324                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3325                         /* Signal that the trap value is fully constant.  */
3326                         mask = 0;
3327                     } else {
3328                         TCGv t1 = gen_load_gpr(dc, rs1);
3329                         tcg_gen_trunc_tl_i32(trap, t1);
3330                         tcg_gen_addi_i32(trap, trap, rs2);
3331                     }
3332                 } else {
3333                     TCGv t1, t2;
3334                     rs2 = GET_FIELD_SP(insn, 0, 4);
3335                     t1 = gen_load_gpr(dc, rs1);
3336                     t2 = gen_load_gpr(dc, rs2);
3337                     tcg_gen_add_tl(t1, t1, t2);
3338                     tcg_gen_trunc_tl_i32(trap, t1);
3339                 }
3340                 if (mask != 0) {
3341                     tcg_gen_andi_i32(trap, trap, mask);
3342                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3343                 }
3344 
3345                 gen_helper_raise_exception(cpu_env, trap);
3346                 tcg_temp_free_i32(trap);
3347 
3348                 if (cond == 8) {
3349                     /* An unconditional trap ends the TB.  */
3350                     dc->is_br = 1;
3351                     goto jmp_insn;
3352                 } else {
3353                     /* A conditional trap falls through to the next insn.  */
3354                     gen_set_label(l1);
3355                     break;
3356                 }
3357             } else if (xop == 0x28) {
3358                 rs1 = GET_FIELD(insn, 13, 17);
3359                 switch(rs1) {
3360                 case 0: /* rdy */
3361 #ifndef TARGET_SPARC64
3362                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3363                                        manual, rdy on the microSPARC
3364                                        II */
3365                 case 0x0f:          /* stbar in the SPARCv8 manual,
3366                                        rdy on the microSPARC II */
3367                 case 0x10 ... 0x1f: /* implementation-dependent in the
3368                                        SPARCv8 manual, rdy on the
3369                                        microSPARC II */
3370                     /* Read Asr17 */
3371                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3372                         TCGv t = gen_dest_gpr(dc, rd);
3373                         /* Read Asr17 for a Leon3 monoprocessor */
3374                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3375                         gen_store_gpr(dc, rd, t);
3376                         break;
3377                     }
3378 #endif
3379                     gen_store_gpr(dc, rd, cpu_y);
3380                     break;
3381 #ifdef TARGET_SPARC64
3382                 case 0x2: /* V9 rdccr */
3383                     update_psr(dc);
3384                     gen_helper_rdccr(cpu_dst, cpu_env);
3385                     gen_store_gpr(dc, rd, cpu_dst);
3386                     break;
3387                 case 0x3: /* V9 rdasi */
3388                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3389                     gen_store_gpr(dc, rd, cpu_dst);
3390                     break;
3391                 case 0x4: /* V9 rdtick */
3392                     {
3393                         TCGv_ptr r_tickptr;
3394                         TCGv_i32 r_const;
3395 
3396                         r_tickptr = tcg_temp_new_ptr();
3397                         r_const = tcg_const_i32(dc->mem_idx);
3398                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3399                                        offsetof(CPUSPARCState, tick));
3400                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3401                                                   r_const);
3402                         tcg_temp_free_ptr(r_tickptr);
3403                         tcg_temp_free_i32(r_const);
3404                         gen_store_gpr(dc, rd, cpu_dst);
3405                     }
3406                     break;
3407                 case 0x5: /* V9 rdpc */
3408                     {
3409                         TCGv t = gen_dest_gpr(dc, rd);
3410                         if (unlikely(AM_CHECK(dc))) {
3411                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3412                         } else {
3413                             tcg_gen_movi_tl(t, dc->pc);
3414                         }
3415                         gen_store_gpr(dc, rd, t);
3416                     }
3417                     break;
3418                 case 0x6: /* V9 rdfprs */
3419                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3420                     gen_store_gpr(dc, rd, cpu_dst);
3421                     break;
3422                 case 0xf: /* V9 membar */
3423                     break; /* no effect */
3424                 case 0x13: /* Graphics Status */
3425                     if (gen_trap_ifnofpu(dc)) {
3426                         goto jmp_insn;
3427                     }
3428                     gen_store_gpr(dc, rd, cpu_gsr);
3429                     break;
3430                 case 0x16: /* Softint */
3431                     tcg_gen_ld32s_tl(cpu_dst, cpu_env,
3432                                      offsetof(CPUSPARCState, softint));
3433                     gen_store_gpr(dc, rd, cpu_dst);
3434                     break;
3435                 case 0x17: /* Tick compare */
3436                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3437                     break;
3438                 case 0x18: /* System tick */
3439                     {
3440                         TCGv_ptr r_tickptr;
3441                         TCGv_i32 r_const;
3442 
3443                         r_tickptr = tcg_temp_new_ptr();
3444                         r_const = tcg_const_i32(dc->mem_idx);
3445                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3446                                        offsetof(CPUSPARCState, stick));
3447                         gen_helper_tick_get_count(cpu_dst, cpu_env, r_tickptr,
3448                                                   r_const);
3449                         tcg_temp_free_ptr(r_tickptr);
3450                         tcg_temp_free_i32(r_const);
3451                         gen_store_gpr(dc, rd, cpu_dst);
3452                     }
3453                     break;
3454                 case 0x19: /* System tick compare */
3455                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3456                     break;
3457                 case 0x1a: /* UltraSPARC-T1 Strand status */
3458                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3459                      * this ASR as impl. dep
3460                      */
3461                     CHECK_IU_FEATURE(dc, HYPV);
3462                     {
3463                         TCGv t = gen_dest_gpr(dc, rd);
3464                         tcg_gen_movi_tl(t, 1UL);
3465                         gen_store_gpr(dc, rd, t);
3466                     }
3467                     break;
3468                 case 0x10: /* Performance Control */
3469                 case 0x11: /* Performance Instrumentation Counter */
3470                 case 0x12: /* Dispatch Control */
3471                 case 0x14: /* Softint set, WO */
3472                 case 0x15: /* Softint clear, WO */
3473 #endif
3474                 default:
3475                     goto illegal_insn;
3476                 }
3477 #if !defined(CONFIG_USER_ONLY)
3478             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3479 #ifndef TARGET_SPARC64
3480                 if (!supervisor(dc)) {
3481                     goto priv_insn;
3482                 }
3483                 update_psr(dc);
3484                 gen_helper_rdpsr(cpu_dst, cpu_env);
3485 #else
3486                 CHECK_IU_FEATURE(dc, HYPV);
3487                 if (!hypervisor(dc))
3488                     goto priv_insn;
3489                 rs1 = GET_FIELD(insn, 13, 17);
3490                 switch (rs1) {
3491                 case 0: // hpstate
3492                     tcg_gen_ld_i64(cpu_dst, cpu_env,
3493                                    offsetof(CPUSPARCState, hpstate));
3494                     break;
3495                 case 1: // htstate
3496                     // gen_op_rdhtstate();
3497                     break;
3498                 case 3: // hintp
3499                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3500                     break;
3501                 case 5: // htba
3502                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3503                     break;
3504                 case 6: // hver
3505                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3506                     break;
3507                 case 31: // hstick_cmpr
3508                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3509                     break;
3510                 default:
3511                     goto illegal_insn;
3512                 }
3513 #endif
3514                 gen_store_gpr(dc, rd, cpu_dst);
3515                 break;
3516             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3517                 if (!supervisor(dc)) {
3518                     goto priv_insn;
3519                 }
3520                 cpu_tmp0 = get_temp_tl(dc);
3521 #ifdef TARGET_SPARC64
3522                 rs1 = GET_FIELD(insn, 13, 17);
3523                 switch (rs1) {
3524                 case 0: // tpc
3525                     {
3526                         TCGv_ptr r_tsptr;
3527 
3528                         r_tsptr = tcg_temp_new_ptr();
3529                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3530                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3531                                       offsetof(trap_state, tpc));
3532                         tcg_temp_free_ptr(r_tsptr);
3533                     }
3534                     break;
3535                 case 1: // tnpc
3536                     {
3537                         TCGv_ptr r_tsptr;
3538 
3539                         r_tsptr = tcg_temp_new_ptr();
3540                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3541                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3542                                       offsetof(trap_state, tnpc));
3543                         tcg_temp_free_ptr(r_tsptr);
3544                     }
3545                     break;
3546                 case 2: // tstate
3547                     {
3548                         TCGv_ptr r_tsptr;
3549 
3550                         r_tsptr = tcg_temp_new_ptr();
3551                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3552                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3553                                       offsetof(trap_state, tstate));
3554                         tcg_temp_free_ptr(r_tsptr);
3555                     }
3556                     break;
3557                 case 3: // tt
3558                     {
3559                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3560 
3561                         gen_load_trap_state_at_tl(r_tsptr, cpu_env);
3562                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3563                                          offsetof(trap_state, tt));
3564                         tcg_temp_free_ptr(r_tsptr);
3565                     }
3566                     break;
3567                 case 4: // tick
3568                     {
3569                         TCGv_ptr r_tickptr;
3570                         TCGv_i32 r_const;
3571 
3572                         r_tickptr = tcg_temp_new_ptr();
3573                         r_const = tcg_const_i32(dc->mem_idx);
3574                         tcg_gen_ld_ptr(r_tickptr, cpu_env,
3575                                        offsetof(CPUSPARCState, tick));
3576                         gen_helper_tick_get_count(cpu_tmp0, cpu_env,
3577                                                   r_tickptr, r_const);
3578                         tcg_temp_free_ptr(r_tickptr);
3579                         tcg_temp_free_i32(r_const);
3580                     }
3581                     break;
3582                 case 5: // tba
3583                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3584                     break;
3585                 case 6: // pstate
3586                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3587                                      offsetof(CPUSPARCState, pstate));
3588                     break;
3589                 case 7: // tl
3590                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3591                                      offsetof(CPUSPARCState, tl));
3592                     break;
3593                 case 8: // pil
3594                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3595                                      offsetof(CPUSPARCState, psrpil));
3596                     break;
3597                 case 9: // cwp
3598                     gen_helper_rdcwp(cpu_tmp0, cpu_env);
3599                     break;
3600                 case 10: // cansave
3601                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3602                                      offsetof(CPUSPARCState, cansave));
3603                     break;
3604                 case 11: // canrestore
3605                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3606                                      offsetof(CPUSPARCState, canrestore));
3607                     break;
3608                 case 12: // cleanwin
3609                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3610                                      offsetof(CPUSPARCState, cleanwin));
3611                     break;
3612                 case 13: // otherwin
3613                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3614                                      offsetof(CPUSPARCState, otherwin));
3615                     break;
3616                 case 14: // wstate
3617                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3618                                      offsetof(CPUSPARCState, wstate));
3619                     break;
3620                 case 16: // UA2005 gl
3621                     CHECK_IU_FEATURE(dc, GL);
3622                     tcg_gen_ld32s_tl(cpu_tmp0, cpu_env,
3623                                      offsetof(CPUSPARCState, gl));
3624                     break;
3625                 case 26: // UA2005 strand status
3626                     CHECK_IU_FEATURE(dc, HYPV);
3627                     if (!hypervisor(dc))
3628                         goto priv_insn;
3629                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3630                     break;
3631                 case 31: // ver
3632                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3633                     break;
3634                 case 15: // fq
3635                 default:
3636                     goto illegal_insn;
3637                 }
3638 #else
3639                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3640 #endif
3641                 gen_store_gpr(dc, rd, cpu_tmp0);
3642                 break;
3643             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3644 #ifdef TARGET_SPARC64
3645                 gen_helper_flushw(cpu_env);
3646 #else
3647                 if (!supervisor(dc))
3648                     goto priv_insn;
3649                 gen_store_gpr(dc, rd, cpu_tbr);
3650 #endif
3651                 break;
3652 #endif
3653             } else if (xop == 0x34) {   /* FPU Operations */
3654                 if (gen_trap_ifnofpu(dc)) {
3655                     goto jmp_insn;
3656                 }
3657                 gen_op_clear_ieee_excp_and_FTT();
3658                 rs1 = GET_FIELD(insn, 13, 17);
3659                 rs2 = GET_FIELD(insn, 27, 31);
3660                 xop = GET_FIELD(insn, 18, 26);
3661 
3662                 switch (xop) {
3663                 case 0x1: /* fmovs */
3664                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3665                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3666                     break;
3667                 case 0x5: /* fnegs */
3668                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3669                     break;
3670                 case 0x9: /* fabss */
3671                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3672                     break;
3673                 case 0x29: /* fsqrts */
3674                     CHECK_FPU_FEATURE(dc, FSQRT);
3675                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3676                     break;
3677                 case 0x2a: /* fsqrtd */
3678                     CHECK_FPU_FEATURE(dc, FSQRT);
3679                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3680                     break;
3681                 case 0x2b: /* fsqrtq */
3682                     CHECK_FPU_FEATURE(dc, FLOAT128);
3683                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3684                     break;
3685                 case 0x41: /* fadds */
3686                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3687                     break;
3688                 case 0x42: /* faddd */
3689                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3690                     break;
3691                 case 0x43: /* faddq */
3692                     CHECK_FPU_FEATURE(dc, FLOAT128);
3693                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3694                     break;
3695                 case 0x45: /* fsubs */
3696                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3697                     break;
3698                 case 0x46: /* fsubd */
3699                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3700                     break;
3701                 case 0x47: /* fsubq */
3702                     CHECK_FPU_FEATURE(dc, FLOAT128);
3703                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3704                     break;
3705                 case 0x49: /* fmuls */
3706                     CHECK_FPU_FEATURE(dc, FMUL);
3707                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3708                     break;
3709                 case 0x4a: /* fmuld */
3710                     CHECK_FPU_FEATURE(dc, FMUL);
3711                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3712                     break;
3713                 case 0x4b: /* fmulq */
3714                     CHECK_FPU_FEATURE(dc, FLOAT128);
3715                     CHECK_FPU_FEATURE(dc, FMUL);
3716                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3717                     break;
3718                 case 0x4d: /* fdivs */
3719                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3720                     break;
3721                 case 0x4e: /* fdivd */
3722                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3723                     break;
3724                 case 0x4f: /* fdivq */
3725                     CHECK_FPU_FEATURE(dc, FLOAT128);
3726                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3727                     break;
3728                 case 0x69: /* fsmuld */
3729                     CHECK_FPU_FEATURE(dc, FSMULD);
3730                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3731                     break;
3732                 case 0x6e: /* fdmulq */
3733                     CHECK_FPU_FEATURE(dc, FLOAT128);
3734                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3735                     break;
3736                 case 0xc4: /* fitos */
3737                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3738                     break;
3739                 case 0xc6: /* fdtos */
3740                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3741                     break;
3742                 case 0xc7: /* fqtos */
3743                     CHECK_FPU_FEATURE(dc, FLOAT128);
3744                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3745                     break;
3746                 case 0xc8: /* fitod */
3747                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3748                     break;
3749                 case 0xc9: /* fstod */
3750                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3751                     break;
3752                 case 0xcb: /* fqtod */
3753                     CHECK_FPU_FEATURE(dc, FLOAT128);
3754                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3755                     break;
3756                 case 0xcc: /* fitoq */
3757                     CHECK_FPU_FEATURE(dc, FLOAT128);
3758                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3759                     break;
3760                 case 0xcd: /* fstoq */
3761                     CHECK_FPU_FEATURE(dc, FLOAT128);
3762                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3763                     break;
3764                 case 0xce: /* fdtoq */
3765                     CHECK_FPU_FEATURE(dc, FLOAT128);
3766                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3767                     break;
3768                 case 0xd1: /* fstoi */
3769                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3770                     break;
3771                 case 0xd2: /* fdtoi */
3772                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3773                     break;
3774                 case 0xd3: /* fqtoi */
3775                     CHECK_FPU_FEATURE(dc, FLOAT128);
3776                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3777                     break;
3778 #ifdef TARGET_SPARC64
3779                 case 0x2: /* V9 fmovd */
3780                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3781                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3782                     break;
3783                 case 0x3: /* V9 fmovq */
3784                     CHECK_FPU_FEATURE(dc, FLOAT128);
3785                     gen_move_Q(dc, rd, rs2);
3786                     break;
3787                 case 0x6: /* V9 fnegd */
3788                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3789                     break;
3790                 case 0x7: /* V9 fnegq */
3791                     CHECK_FPU_FEATURE(dc, FLOAT128);
3792                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3793                     break;
3794                 case 0xa: /* V9 fabsd */
3795                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3796                     break;
3797                 case 0xb: /* V9 fabsq */
3798                     CHECK_FPU_FEATURE(dc, FLOAT128);
3799                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3800                     break;
3801                 case 0x81: /* V9 fstox */
3802                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3803                     break;
3804                 case 0x82: /* V9 fdtox */
3805                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3806                     break;
3807                 case 0x83: /* V9 fqtox */
3808                     CHECK_FPU_FEATURE(dc, FLOAT128);
3809                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3810                     break;
3811                 case 0x84: /* V9 fxtos */
3812                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3813                     break;
3814                 case 0x88: /* V9 fxtod */
3815                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3816                     break;
3817                 case 0x8c: /* V9 fxtoq */
3818                     CHECK_FPU_FEATURE(dc, FLOAT128);
3819                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3820                     break;
3821 #endif
3822                 default:
3823                     goto illegal_insn;
3824                 }
3825             } else if (xop == 0x35) {   /* FPU Operations */
3826 #ifdef TARGET_SPARC64
3827                 int cond;
3828 #endif
3829                 if (gen_trap_ifnofpu(dc)) {
3830                     goto jmp_insn;
3831                 }
3832                 gen_op_clear_ieee_excp_and_FTT();
3833                 rs1 = GET_FIELD(insn, 13, 17);
3834                 rs2 = GET_FIELD(insn, 27, 31);
3835                 xop = GET_FIELD(insn, 18, 26);
3836 
3837 #ifdef TARGET_SPARC64
3838 #define FMOVR(sz)                                                  \
3839                 do {                                               \
3840                     DisasCompare cmp;                              \
3841                     cond = GET_FIELD_SP(insn, 10, 12);             \
3842                     cpu_src1 = get_src1(dc, insn);                 \
3843                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3844                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3845                     free_compare(&cmp);                            \
3846                 } while (0)
3847 
3848                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3849                     FMOVR(s);
3850                     break;
3851                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3852                     FMOVR(d);
3853                     break;
3854                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3855                     CHECK_FPU_FEATURE(dc, FLOAT128);
3856                     FMOVR(q);
3857                     break;
3858                 }
3859 #undef FMOVR
3860 #endif
3861                 switch (xop) {
3862 #ifdef TARGET_SPARC64
3863 #define FMOVCC(fcc, sz)                                                 \
3864                     do {                                                \
3865                         DisasCompare cmp;                               \
3866                         cond = GET_FIELD_SP(insn, 14, 17);              \
3867                         gen_fcompare(&cmp, fcc, cond);                  \
3868                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3869                         free_compare(&cmp);                             \
3870                     } while (0)
3871 
3872                     case 0x001: /* V9 fmovscc %fcc0 */
3873                         FMOVCC(0, s);
3874                         break;
3875                     case 0x002: /* V9 fmovdcc %fcc0 */
3876                         FMOVCC(0, d);
3877                         break;
3878                     case 0x003: /* V9 fmovqcc %fcc0 */
3879                         CHECK_FPU_FEATURE(dc, FLOAT128);
3880                         FMOVCC(0, q);
3881                         break;
3882                     case 0x041: /* V9 fmovscc %fcc1 */
3883                         FMOVCC(1, s);
3884                         break;
3885                     case 0x042: /* V9 fmovdcc %fcc1 */
3886                         FMOVCC(1, d);
3887                         break;
3888                     case 0x043: /* V9 fmovqcc %fcc1 */
3889                         CHECK_FPU_FEATURE(dc, FLOAT128);
3890                         FMOVCC(1, q);
3891                         break;
3892                     case 0x081: /* V9 fmovscc %fcc2 */
3893                         FMOVCC(2, s);
3894                         break;
3895                     case 0x082: /* V9 fmovdcc %fcc2 */
3896                         FMOVCC(2, d);
3897                         break;
3898                     case 0x083: /* V9 fmovqcc %fcc2 */
3899                         CHECK_FPU_FEATURE(dc, FLOAT128);
3900                         FMOVCC(2, q);
3901                         break;
3902                     case 0x0c1: /* V9 fmovscc %fcc3 */
3903                         FMOVCC(3, s);
3904                         break;
3905                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3906                         FMOVCC(3, d);
3907                         break;
3908                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3909                         CHECK_FPU_FEATURE(dc, FLOAT128);
3910                         FMOVCC(3, q);
3911                         break;
3912 #undef FMOVCC
3913 #define FMOVCC(xcc, sz)                                                 \
3914                     do {                                                \
3915                         DisasCompare cmp;                               \
3916                         cond = GET_FIELD_SP(insn, 14, 17);              \
3917                         gen_compare(&cmp, xcc, cond, dc);               \
3918                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3919                         free_compare(&cmp);                             \
3920                     } while (0)
3921 
3922                     case 0x101: /* V9 fmovscc %icc */
3923                         FMOVCC(0, s);
3924                         break;
3925                     case 0x102: /* V9 fmovdcc %icc */
3926                         FMOVCC(0, d);
3927                         break;
3928                     case 0x103: /* V9 fmovqcc %icc */
3929                         CHECK_FPU_FEATURE(dc, FLOAT128);
3930                         FMOVCC(0, q);
3931                         break;
3932                     case 0x181: /* V9 fmovscc %xcc */
3933                         FMOVCC(1, s);
3934                         break;
3935                     case 0x182: /* V9 fmovdcc %xcc */
3936                         FMOVCC(1, d);
3937                         break;
3938                     case 0x183: /* V9 fmovqcc %xcc */
3939                         CHECK_FPU_FEATURE(dc, FLOAT128);
3940                         FMOVCC(1, q);
3941                         break;
3942 #undef FMOVCC
3943 #endif
3944                     case 0x51: /* fcmps, V9 %fcc */
3945                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3946                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3947                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3948                         break;
3949                     case 0x52: /* fcmpd, V9 %fcc */
3950                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3951                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3952                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3953                         break;
3954                     case 0x53: /* fcmpq, V9 %fcc */
3955                         CHECK_FPU_FEATURE(dc, FLOAT128);
3956                         gen_op_load_fpr_QT0(QFPREG(rs1));
3957                         gen_op_load_fpr_QT1(QFPREG(rs2));
3958                         gen_op_fcmpq(rd & 3);
3959                         break;
3960                     case 0x55: /* fcmpes, V9 %fcc */
3961                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3962                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3963                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3964                         break;
3965                     case 0x56: /* fcmped, V9 %fcc */
3966                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3967                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3968                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3969                         break;
3970                     case 0x57: /* fcmpeq, V9 %fcc */
3971                         CHECK_FPU_FEATURE(dc, FLOAT128);
3972                         gen_op_load_fpr_QT0(QFPREG(rs1));
3973                         gen_op_load_fpr_QT1(QFPREG(rs2));
3974                         gen_op_fcmpeq(rd & 3);
3975                         break;
3976                     default:
3977                         goto illegal_insn;
3978                 }
3979             } else if (xop == 0x2) {
3980                 TCGv dst = gen_dest_gpr(dc, rd);
3981                 rs1 = GET_FIELD(insn, 13, 17);
3982                 if (rs1 == 0) {
3983                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3984                     if (IS_IMM) {       /* immediate */
3985                         simm = GET_FIELDs(insn, 19, 31);
3986                         tcg_gen_movi_tl(dst, simm);
3987                         gen_store_gpr(dc, rd, dst);
3988                     } else {            /* register */
3989                         rs2 = GET_FIELD(insn, 27, 31);
3990                         if (rs2 == 0) {
3991                             tcg_gen_movi_tl(dst, 0);
3992                             gen_store_gpr(dc, rd, dst);
3993                         } else {
3994                             cpu_src2 = gen_load_gpr(dc, rs2);
3995                             gen_store_gpr(dc, rd, cpu_src2);
3996                         }
3997                     }
3998                 } else {
3999                     cpu_src1 = get_src1(dc, insn);
4000                     if (IS_IMM) {       /* immediate */
4001                         simm = GET_FIELDs(insn, 19, 31);
4002                         tcg_gen_ori_tl(dst, cpu_src1, simm);
4003                         gen_store_gpr(dc, rd, dst);
4004                     } else {            /* register */
4005                         rs2 = GET_FIELD(insn, 27, 31);
4006                         if (rs2 == 0) {
4007                             /* mov shortcut:  or x, %g0, y -> mov x, y */
4008                             gen_store_gpr(dc, rd, cpu_src1);
4009                         } else {
4010                             cpu_src2 = gen_load_gpr(dc, rs2);
4011                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
4012                             gen_store_gpr(dc, rd, dst);
4013                         }
4014                     }
4015                 }
4016 #ifdef TARGET_SPARC64
4017             } else if (xop == 0x25) { /* sll, V9 sllx */
4018                 cpu_src1 = get_src1(dc, insn);
4019                 if (IS_IMM) {   /* immediate */
4020                     simm = GET_FIELDs(insn, 20, 31);
4021                     if (insn & (1 << 12)) {
4022                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
4023                     } else {
4024                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
4025                     }
4026                 } else {                /* register */
4027                     rs2 = GET_FIELD(insn, 27, 31);
4028                     cpu_src2 = gen_load_gpr(dc, rs2);
4029                     cpu_tmp0 = get_temp_tl(dc);
4030                     if (insn & (1 << 12)) {
4031                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4032                     } else {
4033                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4034                     }
4035                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
4036                 }
4037                 gen_store_gpr(dc, rd, cpu_dst);
4038             } else if (xop == 0x26) { /* srl, V9 srlx */
4039                 cpu_src1 = get_src1(dc, insn);
4040                 if (IS_IMM) {   /* immediate */
4041                     simm = GET_FIELDs(insn, 20, 31);
4042                     if (insn & (1 << 12)) {
4043                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
4044                     } else {
4045                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4046                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
4047                     }
4048                 } else {                /* register */
4049                     rs2 = GET_FIELD(insn, 27, 31);
4050                     cpu_src2 = gen_load_gpr(dc, rs2);
4051                     cpu_tmp0 = get_temp_tl(dc);
4052                     if (insn & (1 << 12)) {
4053                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4054                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
4055                     } else {
4056                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4057                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
4058                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
4059                     }
4060                 }
4061                 gen_store_gpr(dc, rd, cpu_dst);
4062             } else if (xop == 0x27) { /* sra, V9 srax */
4063                 cpu_src1 = get_src1(dc, insn);
4064                 if (IS_IMM) {   /* immediate */
4065                     simm = GET_FIELDs(insn, 20, 31);
4066                     if (insn & (1 << 12)) {
4067                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
4068                     } else {
4069                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4070                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
4071                     }
4072                 } else {                /* register */
4073                     rs2 = GET_FIELD(insn, 27, 31);
4074                     cpu_src2 = gen_load_gpr(dc, rs2);
4075                     cpu_tmp0 = get_temp_tl(dc);
4076                     if (insn & (1 << 12)) {
4077                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
4078                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
4079                     } else {
4080                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
4081                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
4082                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
4083                     }
4084                 }
4085                 gen_store_gpr(dc, rd, cpu_dst);
4086 #endif
4087             } else if (xop < 0x36) {
4088                 if (xop < 0x20) {
4089                     cpu_src1 = get_src1(dc, insn);
4090                     cpu_src2 = get_src2(dc, insn);
4091                     switch (xop & ~0x10) {
4092                     case 0x0: /* add */
4093                         if (xop & 0x10) {
4094                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4095                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4096                             dc->cc_op = CC_OP_ADD;
4097                         } else {
4098                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4099                         }
4100                         break;
4101                     case 0x1: /* and */
4102                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
4103                         if (xop & 0x10) {
4104                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4105                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4106                             dc->cc_op = CC_OP_LOGIC;
4107                         }
4108                         break;
4109                     case 0x2: /* or */
4110                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
4111                         if (xop & 0x10) {
4112                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4113                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4114                             dc->cc_op = CC_OP_LOGIC;
4115                         }
4116                         break;
4117                     case 0x3: /* xor */
4118                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4119                         if (xop & 0x10) {
4120                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4121                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4122                             dc->cc_op = CC_OP_LOGIC;
4123                         }
4124                         break;
4125                     case 0x4: /* sub */
4126                         if (xop & 0x10) {
4127                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4128                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4129                             dc->cc_op = CC_OP_SUB;
4130                         } else {
4131                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4132                         }
4133                         break;
4134                     case 0x5: /* andn */
4135                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4136                         if (xop & 0x10) {
4137                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4138                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4139                             dc->cc_op = CC_OP_LOGIC;
4140                         }
4141                         break;
4142                     case 0x6: /* orn */
4143                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4144                         if (xop & 0x10) {
4145                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4146                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4147                             dc->cc_op = CC_OP_LOGIC;
4148                         }
4149                         break;
4150                     case 0x7: /* xorn */
4151                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4152                         if (xop & 0x10) {
4153                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4154                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4155                             dc->cc_op = CC_OP_LOGIC;
4156                         }
4157                         break;
4158                     case 0x8: /* addx, V9 addc */
4159                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4160                                         (xop & 0x10));
4161                         break;
4162 #ifdef TARGET_SPARC64
4163                     case 0x9: /* V9 mulx */
4164                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4165                         break;
4166 #endif
4167                     case 0xa: /* umul */
4168                         CHECK_IU_FEATURE(dc, MUL);
4169                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4170                         if (xop & 0x10) {
4171                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4172                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4173                             dc->cc_op = CC_OP_LOGIC;
4174                         }
4175                         break;
4176                     case 0xb: /* smul */
4177                         CHECK_IU_FEATURE(dc, MUL);
4178                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4179                         if (xop & 0x10) {
4180                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4181                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4182                             dc->cc_op = CC_OP_LOGIC;
4183                         }
4184                         break;
4185                     case 0xc: /* subx, V9 subc */
4186                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4187                                         (xop & 0x10));
4188                         break;
4189 #ifdef TARGET_SPARC64
4190                     case 0xd: /* V9 udivx */
4191                         gen_helper_udivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4192                         break;
4193 #endif
4194                     case 0xe: /* udiv */
4195                         CHECK_IU_FEATURE(dc, DIV);
4196                         if (xop & 0x10) {
4197                             gen_helper_udiv_cc(cpu_dst, cpu_env, cpu_src1,
4198                                                cpu_src2);
4199                             dc->cc_op = CC_OP_DIV;
4200                         } else {
4201                             gen_helper_udiv(cpu_dst, cpu_env, cpu_src1,
4202                                             cpu_src2);
4203                         }
4204                         break;
4205                     case 0xf: /* sdiv */
4206                         CHECK_IU_FEATURE(dc, DIV);
4207                         if (xop & 0x10) {
4208                             gen_helper_sdiv_cc(cpu_dst, cpu_env, cpu_src1,
4209                                                cpu_src2);
4210                             dc->cc_op = CC_OP_DIV;
4211                         } else {
4212                             gen_helper_sdiv(cpu_dst, cpu_env, cpu_src1,
4213                                             cpu_src2);
4214                         }
4215                         break;
4216                     default:
4217                         goto illegal_insn;
4218                     }
4219                     gen_store_gpr(dc, rd, cpu_dst);
4220                 } else {
4221                     cpu_src1 = get_src1(dc, insn);
4222                     cpu_src2 = get_src2(dc, insn);
4223                     switch (xop) {
4224                     case 0x20: /* taddcc */
4225                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4226                         gen_store_gpr(dc, rd, cpu_dst);
4227                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4228                         dc->cc_op = CC_OP_TADD;
4229                         break;
4230                     case 0x21: /* tsubcc */
4231                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4232                         gen_store_gpr(dc, rd, cpu_dst);
4233                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4234                         dc->cc_op = CC_OP_TSUB;
4235                         break;
4236                     case 0x22: /* taddcctv */
4237                         gen_helper_taddcctv(cpu_dst, cpu_env,
4238                                             cpu_src1, cpu_src2);
4239                         gen_store_gpr(dc, rd, cpu_dst);
4240                         dc->cc_op = CC_OP_TADDTV;
4241                         break;
4242                     case 0x23: /* tsubcctv */
4243                         gen_helper_tsubcctv(cpu_dst, cpu_env,
4244                                             cpu_src1, cpu_src2);
4245                         gen_store_gpr(dc, rd, cpu_dst);
4246                         dc->cc_op = CC_OP_TSUBTV;
4247                         break;
4248                     case 0x24: /* mulscc */
4249                         update_psr(dc);
4250                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4251                         gen_store_gpr(dc, rd, cpu_dst);
4252                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4253                         dc->cc_op = CC_OP_ADD;
4254                         break;
4255 #ifndef TARGET_SPARC64
4256                     case 0x25:  /* sll */
4257                         if (IS_IMM) { /* immediate */
4258                             simm = GET_FIELDs(insn, 20, 31);
4259                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4260                         } else { /* register */
4261                             cpu_tmp0 = get_temp_tl(dc);
4262                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4263                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4264                         }
4265                         gen_store_gpr(dc, rd, cpu_dst);
4266                         break;
4267                     case 0x26:  /* srl */
4268                         if (IS_IMM) { /* immediate */
4269                             simm = GET_FIELDs(insn, 20, 31);
4270                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4271                         } else { /* register */
4272                             cpu_tmp0 = get_temp_tl(dc);
4273                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4274                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4275                         }
4276                         gen_store_gpr(dc, rd, cpu_dst);
4277                         break;
4278                     case 0x27:  /* sra */
4279                         if (IS_IMM) { /* immediate */
4280                             simm = GET_FIELDs(insn, 20, 31);
4281                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4282                         } else { /* register */
4283                             cpu_tmp0 = get_temp_tl(dc);
4284                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4285                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4286                         }
4287                         gen_store_gpr(dc, rd, cpu_dst);
4288                         break;
4289 #endif
4290                     case 0x30:
4291                         {
4292                             cpu_tmp0 = get_temp_tl(dc);
4293                             switch(rd) {
4294                             case 0: /* wry */
4295                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4296                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4297                                 break;
4298 #ifndef TARGET_SPARC64
4299                             case 0x01 ... 0x0f: /* undefined in the
4300                                                    SPARCv8 manual, nop
4301                                                    on the microSPARC
4302                                                    II */
4303                             case 0x10 ... 0x1f: /* implementation-dependent
4304                                                    in the SPARCv8
4305                                                    manual, nop on the
4306                                                    microSPARC II */
4307                                 if ((rd == 0x13) && (dc->def->features &
4308                                                      CPU_FEATURE_POWERDOWN)) {
4309                                     /* LEON3 power-down */
4310                                     save_state(dc);
4311                                     gen_helper_power_down(cpu_env);
4312                                 }
4313                                 break;
4314 #else
4315                             case 0x2: /* V9 wrccr */
4316                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4317                                 gen_helper_wrccr(cpu_env, cpu_tmp0);
4318                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4319                                 dc->cc_op = CC_OP_FLAGS;
4320                                 break;
4321                             case 0x3: /* V9 wrasi */
4322                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4323                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4324                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4325                                                 offsetof(CPUSPARCState, asi));
4326                                 /* End TB to notice changed ASI.  */
4327                                 save_state(dc);
4328                                 gen_op_next_insn();
4329                                 tcg_gen_exit_tb(0);
4330                                 dc->is_br = 1;
4331                                 break;
4332                             case 0x6: /* V9 wrfprs */
4333                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4334                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4335                                 dc->fprs_dirty = 0;
4336                                 save_state(dc);
4337                                 gen_op_next_insn();
4338                                 tcg_gen_exit_tb(0);
4339                                 dc->is_br = 1;
4340                                 break;
4341                             case 0xf: /* V9 sir, nop if user */
4342 #if !defined(CONFIG_USER_ONLY)
4343                                 if (supervisor(dc)) {
4344                                     ; // XXX
4345                                 }
4346 #endif
4347                                 break;
4348                             case 0x13: /* Graphics Status */
4349                                 if (gen_trap_ifnofpu(dc)) {
4350                                     goto jmp_insn;
4351                                 }
4352                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4353                                 break;
4354                             case 0x14: /* Softint set */
4355                                 if (!supervisor(dc))
4356                                     goto illegal_insn;
4357                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4358                                 gen_helper_set_softint(cpu_env, cpu_tmp0);
4359                                 break;
4360                             case 0x15: /* Softint clear */
4361                                 if (!supervisor(dc))
4362                                     goto illegal_insn;
4363                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4364                                 gen_helper_clear_softint(cpu_env, cpu_tmp0);
4365                                 break;
4366                             case 0x16: /* Softint write */
4367                                 if (!supervisor(dc))
4368                                     goto illegal_insn;
4369                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4370                                 gen_helper_write_softint(cpu_env, cpu_tmp0);
4371                                 break;
4372                             case 0x17: /* Tick compare */
4373 #if !defined(CONFIG_USER_ONLY)
4374                                 if (!supervisor(dc))
4375                                     goto illegal_insn;
4376 #endif
4377                                 {
4378                                     TCGv_ptr r_tickptr;
4379 
4380                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4381                                                    cpu_src2);
4382                                     r_tickptr = tcg_temp_new_ptr();
4383                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4384                                                    offsetof(CPUSPARCState, tick));
4385                                     gen_helper_tick_set_limit(r_tickptr,
4386                                                               cpu_tick_cmpr);
4387                                     tcg_temp_free_ptr(r_tickptr);
4388                                 }
4389                                 break;
4390                             case 0x18: /* System tick */
4391 #if !defined(CONFIG_USER_ONLY)
4392                                 if (!supervisor(dc))
4393                                     goto illegal_insn;
4394 #endif
4395                                 {
4396                                     TCGv_ptr r_tickptr;
4397 
4398                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4399                                                    cpu_src2);
4400                                     r_tickptr = tcg_temp_new_ptr();
4401                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4402                                                    offsetof(CPUSPARCState, stick));
4403                                     gen_helper_tick_set_count(r_tickptr,
4404                                                               cpu_tmp0);
4405                                     tcg_temp_free_ptr(r_tickptr);
4406                                 }
4407                                 break;
4408                             case 0x19: /* System tick compare */
4409 #if !defined(CONFIG_USER_ONLY)
4410                                 if (!supervisor(dc))
4411                                     goto illegal_insn;
4412 #endif
4413                                 {
4414                                     TCGv_ptr r_tickptr;
4415 
4416                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4417                                                    cpu_src2);
4418                                     r_tickptr = tcg_temp_new_ptr();
4419                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4420                                                    offsetof(CPUSPARCState, stick));
4421                                     gen_helper_tick_set_limit(r_tickptr,
4422                                                               cpu_stick_cmpr);
4423                                     tcg_temp_free_ptr(r_tickptr);
4424                                 }
4425                                 break;
4426 
4427                             case 0x10: /* Performance Control */
4428                             case 0x11: /* Performance Instrumentation
4429                                           Counter */
4430                             case 0x12: /* Dispatch Control */
4431 #endif
4432                             default:
4433                                 goto illegal_insn;
4434                             }
4435                         }
4436                         break;
4437 #if !defined(CONFIG_USER_ONLY)
4438                     case 0x31: /* wrpsr, V9 saved, restored */
4439                         {
4440                             if (!supervisor(dc))
4441                                 goto priv_insn;
4442 #ifdef TARGET_SPARC64
4443                             switch (rd) {
4444                             case 0:
4445                                 gen_helper_saved(cpu_env);
4446                                 break;
4447                             case 1:
4448                                 gen_helper_restored(cpu_env);
4449                                 break;
4450                             case 2: /* UA2005 allclean */
4451                             case 3: /* UA2005 otherw */
4452                             case 4: /* UA2005 normalw */
4453                             case 5: /* UA2005 invalw */
4454                                 // XXX
4455                             default:
4456                                 goto illegal_insn;
4457                             }
4458 #else
4459                             cpu_tmp0 = get_temp_tl(dc);
4460                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4461                             gen_helper_wrpsr(cpu_env, cpu_tmp0);
4462                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4463                             dc->cc_op = CC_OP_FLAGS;
4464                             save_state(dc);
4465                             gen_op_next_insn();
4466                             tcg_gen_exit_tb(0);
4467                             dc->is_br = 1;
4468 #endif
4469                         }
4470                         break;
4471                     case 0x32: /* wrwim, V9 wrpr */
4472                         {
4473                             if (!supervisor(dc))
4474                                 goto priv_insn;
4475                             cpu_tmp0 = get_temp_tl(dc);
4476                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4477 #ifdef TARGET_SPARC64
4478                             switch (rd) {
4479                             case 0: // tpc
4480                                 {
4481                                     TCGv_ptr r_tsptr;
4482 
4483                                     r_tsptr = tcg_temp_new_ptr();
4484                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4485                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4486                                                   offsetof(trap_state, tpc));
4487                                     tcg_temp_free_ptr(r_tsptr);
4488                                 }
4489                                 break;
4490                             case 1: // tnpc
4491                                 {
4492                                     TCGv_ptr r_tsptr;
4493 
4494                                     r_tsptr = tcg_temp_new_ptr();
4495                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4496                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4497                                                   offsetof(trap_state, tnpc));
4498                                     tcg_temp_free_ptr(r_tsptr);
4499                                 }
4500                                 break;
4501                             case 2: // tstate
4502                                 {
4503                                     TCGv_ptr r_tsptr;
4504 
4505                                     r_tsptr = tcg_temp_new_ptr();
4506                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4507                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
4508                                                   offsetof(trap_state,
4509                                                            tstate));
4510                                     tcg_temp_free_ptr(r_tsptr);
4511                                 }
4512                                 break;
4513                             case 3: // tt
4514                                 {
4515                                     TCGv_ptr r_tsptr;
4516 
4517                                     r_tsptr = tcg_temp_new_ptr();
4518                                     gen_load_trap_state_at_tl(r_tsptr, cpu_env);
4519                                     tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
4520                                                     offsetof(trap_state, tt));
4521                                     tcg_temp_free_ptr(r_tsptr);
4522                                 }
4523                                 break;
4524                             case 4: // tick
4525                                 {
4526                                     TCGv_ptr r_tickptr;
4527 
4528                                     r_tickptr = tcg_temp_new_ptr();
4529                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4530                                                    offsetof(CPUSPARCState, tick));
4531                                     gen_helper_tick_set_count(r_tickptr,
4532                                                               cpu_tmp0);
4533                                     tcg_temp_free_ptr(r_tickptr);
4534                                 }
4535                                 break;
4536                             case 5: // tba
4537                                 tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
4538                                 break;
4539                             case 6: // pstate
4540                                 save_state(dc);
4541                                 gen_helper_wrpstate(cpu_env, cpu_tmp0);
4542                                 dc->npc = DYNAMIC_PC;
4543                                 break;
4544                             case 7: // tl
4545                                 save_state(dc);
4546                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4547                                                offsetof(CPUSPARCState, tl));
4548                                 dc->npc = DYNAMIC_PC;
4549                                 break;
4550                             case 8: // pil
4551                                 gen_helper_wrpil(cpu_env, cpu_tmp0);
4552                                 break;
4553                             case 9: // cwp
4554                                 gen_helper_wrcwp(cpu_env, cpu_tmp0);
4555                                 break;
4556                             case 10: // cansave
4557                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4558                                                 offsetof(CPUSPARCState,
4559                                                          cansave));
4560                                 break;
4561                             case 11: // canrestore
4562                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4563                                                 offsetof(CPUSPARCState,
4564                                                          canrestore));
4565                                 break;
4566                             case 12: // cleanwin
4567                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4568                                                 offsetof(CPUSPARCState,
4569                                                          cleanwin));
4570                                 break;
4571                             case 13: // otherwin
4572                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4573                                                 offsetof(CPUSPARCState,
4574                                                          otherwin));
4575                                 break;
4576                             case 14: // wstate
4577                                 tcg_gen_st32_tl(cpu_tmp0, cpu_env,
4578                                                 offsetof(CPUSPARCState,
4579                                                          wstate));
4580                                 break;
4581                             case 16: // UA2005 gl
4582                                 CHECK_IU_FEATURE(dc, GL);
4583                                 gen_helper_wrgl(cpu_env, cpu_tmp0);
4584                                 break;
4585                             case 26: // UA2005 strand status
4586                                 CHECK_IU_FEATURE(dc, HYPV);
4587                                 if (!hypervisor(dc))
4588                                     goto priv_insn;
4589                                 tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
4590                                 break;
4591                             default:
4592                                 goto illegal_insn;
4593                             }
4594 #else
4595                             tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
4596                             if (dc->def->nwindows != 32) {
4597                                 tcg_gen_andi_tl(cpu_wim, cpu_wim,
4598                                                 (1 << dc->def->nwindows) - 1);
4599                             }
4600 #endif
4601                         }
4602                         break;
4603                     case 0x33: /* wrtbr, UA2005 wrhpr */
4604                         {
4605 #ifndef TARGET_SPARC64
4606                             if (!supervisor(dc))
4607                                 goto priv_insn;
4608                             tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
4609 #else
4610                             CHECK_IU_FEATURE(dc, HYPV);
4611                             if (!hypervisor(dc))
4612                                 goto priv_insn;
4613                             cpu_tmp0 = get_temp_tl(dc);
4614                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4615                             switch (rd) {
4616                             case 0: // hpstate
4617                                 tcg_gen_st_i64(cpu_tmp0, cpu_env,
4618                                                offsetof(CPUSPARCState,
4619                                                         hpstate));
4620                                 save_state(dc);
4621                                 gen_op_next_insn();
4622                                 tcg_gen_exit_tb(0);
4623                                 dc->is_br = 1;
4624                                 break;
4625                             case 1: // htstate
4626                                 // XXX gen_op_wrhtstate();
4627                                 break;
4628                             case 3: // hintp
4629                                 tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
4630                                 break;
4631                             case 5: // htba
4632                                 tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
4633                                 break;
4634                             case 31: // hstick_cmpr
4635                                 {
4636                                     TCGv_ptr r_tickptr;
4637 
4638                                     tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
4639                                     r_tickptr = tcg_temp_new_ptr();
4640                                     tcg_gen_ld_ptr(r_tickptr, cpu_env,
4641                                                    offsetof(CPUSPARCState, hstick));
4642                                     gen_helper_tick_set_limit(r_tickptr,
4643                                                               cpu_hstick_cmpr);
4644                                     tcg_temp_free_ptr(r_tickptr);
4645                                 }
4646                                 break;
4647                             case 6: // hver readonly
4648                             default:
4649                                 goto illegal_insn;
4650                             }
4651 #endif
4652                         }
4653                         break;
4654 #endif
4655 #ifdef TARGET_SPARC64
4656                     case 0x2c: /* V9 movcc */
4657                         {
4658                             int cc = GET_FIELD_SP(insn, 11, 12);
4659                             int cond = GET_FIELD_SP(insn, 14, 17);
4660                             DisasCompare cmp;
4661                             TCGv dst;
4662 
4663                             if (insn & (1 << 18)) {
4664                                 if (cc == 0) {
4665                                     gen_compare(&cmp, 0, cond, dc);
4666                                 } else if (cc == 2) {
4667                                     gen_compare(&cmp, 1, cond, dc);
4668                                 } else {
4669                                     goto illegal_insn;
4670                                 }
4671                             } else {
4672                                 gen_fcompare(&cmp, cc, cond);
4673                             }
4674 
4675                             /* The get_src2 above loaded the normal 13-bit
4676                                immediate field, not the 11-bit field we have
4677                                in movcc.  But it did handle the reg case.  */
4678                             if (IS_IMM) {
4679                                 simm = GET_FIELD_SPs(insn, 0, 10);
4680                                 tcg_gen_movi_tl(cpu_src2, simm);
4681                             }
4682 
4683                             dst = gen_load_gpr(dc, rd);
4684                             tcg_gen_movcond_tl(cmp.cond, dst,
4685                                                cmp.c1, cmp.c2,
4686                                                cpu_src2, dst);
4687                             free_compare(&cmp);
4688                             gen_store_gpr(dc, rd, dst);
4689                             break;
4690                         }
4691                     case 0x2d: /* V9 sdivx */
4692                         gen_helper_sdivx(cpu_dst, cpu_env, cpu_src1, cpu_src2);
4693                         gen_store_gpr(dc, rd, cpu_dst);
4694                         break;
4695                     case 0x2e: /* V9 popc */
4696                         tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
4697                         gen_store_gpr(dc, rd, cpu_dst);
4698                         break;
4699                     case 0x2f: /* V9 movr */
4700                         {
4701                             int cond = GET_FIELD_SP(insn, 10, 12);
4702                             DisasCompare cmp;
4703                             TCGv dst;
4704 
4705                             gen_compare_reg(&cmp, cond, cpu_src1);
4706 
4707                             /* The get_src2 above loaded the normal 13-bit
4708                                immediate field, not the 10-bit field we have
4709                                in movr.  But it did handle the reg case.  */
4710                             if (IS_IMM) {
4711                                 simm = GET_FIELD_SPs(insn, 0, 9);
4712                                 tcg_gen_movi_tl(cpu_src2, simm);
4713                             }
4714 
4715                             dst = gen_load_gpr(dc, rd);
4716                             tcg_gen_movcond_tl(cmp.cond, dst,
4717                                                cmp.c1, cmp.c2,
4718                                                cpu_src2, dst);
4719                             free_compare(&cmp);
4720                             gen_store_gpr(dc, rd, dst);
4721                             break;
4722                         }
4723 #endif
4724                     default:
4725                         goto illegal_insn;
4726                     }
4727                 }
4728             } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
4729 #ifdef TARGET_SPARC64
4730                 int opf = GET_FIELD_SP(insn, 5, 13);
4731                 rs1 = GET_FIELD(insn, 13, 17);
4732                 rs2 = GET_FIELD(insn, 27, 31);
4733                 if (gen_trap_ifnofpu(dc)) {
4734                     goto jmp_insn;
4735                 }
4736 
4737                 switch (opf) {
4738                 case 0x000: /* VIS I edge8cc */
4739                     CHECK_FPU_FEATURE(dc, VIS1);
4740                     cpu_src1 = gen_load_gpr(dc, rs1);
4741                     cpu_src2 = gen_load_gpr(dc, rs2);
4742                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
4743                     gen_store_gpr(dc, rd, cpu_dst);
4744                     break;
4745                 case 0x001: /* VIS II edge8n */
4746                     CHECK_FPU_FEATURE(dc, VIS2);
4747                     cpu_src1 = gen_load_gpr(dc, rs1);
4748                     cpu_src2 = gen_load_gpr(dc, rs2);
4749                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
4750                     gen_store_gpr(dc, rd, cpu_dst);
4751                     break;
4752                 case 0x002: /* VIS I edge8lcc */
4753                     CHECK_FPU_FEATURE(dc, VIS1);
4754                     cpu_src1 = gen_load_gpr(dc, rs1);
4755                     cpu_src2 = gen_load_gpr(dc, rs2);
4756                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
4757                     gen_store_gpr(dc, rd, cpu_dst);
4758                     break;
4759                 case 0x003: /* VIS II edge8ln */
4760                     CHECK_FPU_FEATURE(dc, VIS2);
4761                     cpu_src1 = gen_load_gpr(dc, rs1);
4762                     cpu_src2 = gen_load_gpr(dc, rs2);
4763                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
4764                     gen_store_gpr(dc, rd, cpu_dst);
4765                     break;
4766                 case 0x004: /* VIS I edge16cc */
4767                     CHECK_FPU_FEATURE(dc, VIS1);
4768                     cpu_src1 = gen_load_gpr(dc, rs1);
4769                     cpu_src2 = gen_load_gpr(dc, rs2);
4770                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
4771                     gen_store_gpr(dc, rd, cpu_dst);
4772                     break;
4773                 case 0x005: /* VIS II edge16n */
4774                     CHECK_FPU_FEATURE(dc, VIS2);
4775                     cpu_src1 = gen_load_gpr(dc, rs1);
4776                     cpu_src2 = gen_load_gpr(dc, rs2);
4777                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
4778                     gen_store_gpr(dc, rd, cpu_dst);
4779                     break;
4780                 case 0x006: /* VIS I edge16lcc */
4781                     CHECK_FPU_FEATURE(dc, VIS1);
4782                     cpu_src1 = gen_load_gpr(dc, rs1);
4783                     cpu_src2 = gen_load_gpr(dc, rs2);
4784                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
4785                     gen_store_gpr(dc, rd, cpu_dst);
4786                     break;
4787                 case 0x007: /* VIS II edge16ln */
4788                     CHECK_FPU_FEATURE(dc, VIS2);
4789                     cpu_src1 = gen_load_gpr(dc, rs1);
4790                     cpu_src2 = gen_load_gpr(dc, rs2);
4791                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
4792                     gen_store_gpr(dc, rd, cpu_dst);
4793                     break;
4794                 case 0x008: /* VIS I edge32cc */
4795                     CHECK_FPU_FEATURE(dc, VIS1);
4796                     cpu_src1 = gen_load_gpr(dc, rs1);
4797                     cpu_src2 = gen_load_gpr(dc, rs2);
4798                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
4799                     gen_store_gpr(dc, rd, cpu_dst);
4800                     break;
4801                 case 0x009: /* VIS II edge32n */
4802                     CHECK_FPU_FEATURE(dc, VIS2);
4803                     cpu_src1 = gen_load_gpr(dc, rs1);
4804                     cpu_src2 = gen_load_gpr(dc, rs2);
4805                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
4806                     gen_store_gpr(dc, rd, cpu_dst);
4807                     break;
4808                 case 0x00a: /* VIS I edge32lcc */
4809                     CHECK_FPU_FEATURE(dc, VIS1);
4810                     cpu_src1 = gen_load_gpr(dc, rs1);
4811                     cpu_src2 = gen_load_gpr(dc, rs2);
4812                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
4813                     gen_store_gpr(dc, rd, cpu_dst);
4814                     break;
4815                 case 0x00b: /* VIS II edge32ln */
4816                     CHECK_FPU_FEATURE(dc, VIS2);
4817                     cpu_src1 = gen_load_gpr(dc, rs1);
4818                     cpu_src2 = gen_load_gpr(dc, rs2);
4819                     gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
4820                     gen_store_gpr(dc, rd, cpu_dst);
4821                     break;
4822                 case 0x010: /* VIS I array8 */
4823                     CHECK_FPU_FEATURE(dc, VIS1);
4824                     cpu_src1 = gen_load_gpr(dc, rs1);
4825                     cpu_src2 = gen_load_gpr(dc, rs2);
4826                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4827                     gen_store_gpr(dc, rd, cpu_dst);
4828                     break;
4829                 case 0x012: /* VIS I array16 */
4830                     CHECK_FPU_FEATURE(dc, VIS1);
4831                     cpu_src1 = gen_load_gpr(dc, rs1);
4832                     cpu_src2 = gen_load_gpr(dc, rs2);
4833                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4834                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
4835                     gen_store_gpr(dc, rd, cpu_dst);
4836                     break;
4837                 case 0x014: /* VIS I array32 */
4838                     CHECK_FPU_FEATURE(dc, VIS1);
4839                     cpu_src1 = gen_load_gpr(dc, rs1);
4840                     cpu_src2 = gen_load_gpr(dc, rs2);
4841                     gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
4842                     tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
4843                     gen_store_gpr(dc, rd, cpu_dst);
4844                     break;
4845                 case 0x018: /* VIS I alignaddr */
4846                     CHECK_FPU_FEATURE(dc, VIS1);
4847                     cpu_src1 = gen_load_gpr(dc, rs1);
4848                     cpu_src2 = gen_load_gpr(dc, rs2);
4849                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
4850                     gen_store_gpr(dc, rd, cpu_dst);
4851                     break;
4852                 case 0x01a: /* VIS I alignaddrl */
4853                     CHECK_FPU_FEATURE(dc, VIS1);
4854                     cpu_src1 = gen_load_gpr(dc, rs1);
4855                     cpu_src2 = gen_load_gpr(dc, rs2);
4856                     gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
4857                     gen_store_gpr(dc, rd, cpu_dst);
4858                     break;
4859                 case 0x019: /* VIS II bmask */
4860                     CHECK_FPU_FEATURE(dc, VIS2);
4861                     cpu_src1 = gen_load_gpr(dc, rs1);
4862                     cpu_src2 = gen_load_gpr(dc, rs2);
4863                     tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
4864                     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
4865                     gen_store_gpr(dc, rd, cpu_dst);
4866                     break;
4867                 case 0x020: /* VIS I fcmple16 */
4868                     CHECK_FPU_FEATURE(dc, VIS1);
4869                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4870                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4871                     gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
4872                     gen_store_gpr(dc, rd, cpu_dst);
4873                     break;
4874                 case 0x022: /* VIS I fcmpne16 */
4875                     CHECK_FPU_FEATURE(dc, VIS1);
4876                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4877                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4878                     gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
4879                     gen_store_gpr(dc, rd, cpu_dst);
4880                     break;
4881                 case 0x024: /* VIS I fcmple32 */
4882                     CHECK_FPU_FEATURE(dc, VIS1);
4883                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4884                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4885                     gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
4886                     gen_store_gpr(dc, rd, cpu_dst);
4887                     break;
4888                 case 0x026: /* VIS I fcmpne32 */
4889                     CHECK_FPU_FEATURE(dc, VIS1);
4890                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4891                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4892                     gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
4893                     gen_store_gpr(dc, rd, cpu_dst);
4894                     break;
4895                 case 0x028: /* VIS I fcmpgt16 */
4896                     CHECK_FPU_FEATURE(dc, VIS1);
4897                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4898                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4899                     gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
4900                     gen_store_gpr(dc, rd, cpu_dst);
4901                     break;
4902                 case 0x02a: /* VIS I fcmpeq16 */
4903                     CHECK_FPU_FEATURE(dc, VIS1);
4904                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4905                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4906                     gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
4907                     gen_store_gpr(dc, rd, cpu_dst);
4908                     break;
4909                 case 0x02c: /* VIS I fcmpgt32 */
4910                     CHECK_FPU_FEATURE(dc, VIS1);
4911                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4912                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4913                     gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
4914                     gen_store_gpr(dc, rd, cpu_dst);
4915                     break;
4916                 case 0x02e: /* VIS I fcmpeq32 */
4917                     CHECK_FPU_FEATURE(dc, VIS1);
4918                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4919                     cpu_src2_64 = gen_load_fpr_D(dc, rs2);
4920                     gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
4921                     gen_store_gpr(dc, rd, cpu_dst);
4922                     break;
4923                 case 0x031: /* VIS I fmul8x16 */
4924                     CHECK_FPU_FEATURE(dc, VIS1);
4925                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
4926                     break;
4927                 case 0x033: /* VIS I fmul8x16au */
4928                     CHECK_FPU_FEATURE(dc, VIS1);
4929                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
4930                     break;
4931                 case 0x035: /* VIS I fmul8x16al */
4932                     CHECK_FPU_FEATURE(dc, VIS1);
4933                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
4934                     break;
4935                 case 0x036: /* VIS I fmul8sux16 */
4936                     CHECK_FPU_FEATURE(dc, VIS1);
4937                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
4938                     break;
4939                 case 0x037: /* VIS I fmul8ulx16 */
4940                     CHECK_FPU_FEATURE(dc, VIS1);
4941                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
4942                     break;
4943                 case 0x038: /* VIS I fmuld8sux16 */
4944                     CHECK_FPU_FEATURE(dc, VIS1);
4945                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
4946                     break;
4947                 case 0x039: /* VIS I fmuld8ulx16 */
4948                     CHECK_FPU_FEATURE(dc, VIS1);
4949                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
4950                     break;
4951                 case 0x03a: /* VIS I fpack32 */
4952                     CHECK_FPU_FEATURE(dc, VIS1);
4953                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
4954                     break;
4955                 case 0x03b: /* VIS I fpack16 */
4956                     CHECK_FPU_FEATURE(dc, VIS1);
4957                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4958                     cpu_dst_32 = gen_dest_fpr_F(dc);
4959                     gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
4960                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4961                     break;
4962                 case 0x03d: /* VIS I fpackfix */
4963                     CHECK_FPU_FEATURE(dc, VIS1);
4964                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
4965                     cpu_dst_32 = gen_dest_fpr_F(dc);
4966                     gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
4967                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4968                     break;
4969                 case 0x03e: /* VIS I pdist */
4970                     CHECK_FPU_FEATURE(dc, VIS1);
4971                     gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
4972                     break;
4973                 case 0x048: /* VIS I faligndata */
4974                     CHECK_FPU_FEATURE(dc, VIS1);
4975                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
4976                     break;
4977                 case 0x04b: /* VIS I fpmerge */
4978                     CHECK_FPU_FEATURE(dc, VIS1);
4979                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
4980                     break;
4981                 case 0x04c: /* VIS II bshuffle */
4982                     CHECK_FPU_FEATURE(dc, VIS2);
4983                     gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
4984                     break;
4985                 case 0x04d: /* VIS I fexpand */
4986                     CHECK_FPU_FEATURE(dc, VIS1);
4987                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
4988                     break;
4989                 case 0x050: /* VIS I fpadd16 */
4990                     CHECK_FPU_FEATURE(dc, VIS1);
4991                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
4992                     break;
4993                 case 0x051: /* VIS I fpadd16s */
4994                     CHECK_FPU_FEATURE(dc, VIS1);
4995                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
4996                     break;
4997                 case 0x052: /* VIS I fpadd32 */
4998                     CHECK_FPU_FEATURE(dc, VIS1);
4999                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
5000                     break;
5001                 case 0x053: /* VIS I fpadd32s */
5002                     CHECK_FPU_FEATURE(dc, VIS1);
5003                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
5004                     break;
5005                 case 0x054: /* VIS I fpsub16 */
5006                     CHECK_FPU_FEATURE(dc, VIS1);
5007                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
5008                     break;
5009                 case 0x055: /* VIS I fpsub16s */
5010                     CHECK_FPU_FEATURE(dc, VIS1);
5011                     gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
5012                     break;
5013                 case 0x056: /* VIS I fpsub32 */
5014                     CHECK_FPU_FEATURE(dc, VIS1);
5015                     gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
5016                     break;
5017                 case 0x057: /* VIS I fpsub32s */
5018                     CHECK_FPU_FEATURE(dc, VIS1);
5019                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
5020                     break;
5021                 case 0x060: /* VIS I fzero */
5022                     CHECK_FPU_FEATURE(dc, VIS1);
5023                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5024                     tcg_gen_movi_i64(cpu_dst_64, 0);
5025                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5026                     break;
5027                 case 0x061: /* VIS I fzeros */
5028                     CHECK_FPU_FEATURE(dc, VIS1);
5029                     cpu_dst_32 = gen_dest_fpr_F(dc);
5030                     tcg_gen_movi_i32(cpu_dst_32, 0);
5031                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5032                     break;
5033                 case 0x062: /* VIS I fnor */
5034                     CHECK_FPU_FEATURE(dc, VIS1);
5035                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
5036                     break;
5037                 case 0x063: /* VIS I fnors */
5038                     CHECK_FPU_FEATURE(dc, VIS1);
5039                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
5040                     break;
5041                 case 0x064: /* VIS I fandnot2 */
5042                     CHECK_FPU_FEATURE(dc, VIS1);
5043                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
5044                     break;
5045                 case 0x065: /* VIS I fandnot2s */
5046                     CHECK_FPU_FEATURE(dc, VIS1);
5047                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
5048                     break;
5049                 case 0x066: /* VIS I fnot2 */
5050                     CHECK_FPU_FEATURE(dc, VIS1);
5051                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
5052                     break;
5053                 case 0x067: /* VIS I fnot2s */
5054                     CHECK_FPU_FEATURE(dc, VIS1);
5055                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
5056                     break;
5057                 case 0x068: /* VIS I fandnot1 */
5058                     CHECK_FPU_FEATURE(dc, VIS1);
5059                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
5060                     break;
5061                 case 0x069: /* VIS I fandnot1s */
5062                     CHECK_FPU_FEATURE(dc, VIS1);
5063                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
5064                     break;
5065                 case 0x06a: /* VIS I fnot1 */
5066                     CHECK_FPU_FEATURE(dc, VIS1);
5067                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
5068                     break;
5069                 case 0x06b: /* VIS I fnot1s */
5070                     CHECK_FPU_FEATURE(dc, VIS1);
5071                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
5072                     break;
5073                 case 0x06c: /* VIS I fxor */
5074                     CHECK_FPU_FEATURE(dc, VIS1);
5075                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
5076                     break;
5077                 case 0x06d: /* VIS I fxors */
5078                     CHECK_FPU_FEATURE(dc, VIS1);
5079                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
5080                     break;
5081                 case 0x06e: /* VIS I fnand */
5082                     CHECK_FPU_FEATURE(dc, VIS1);
5083                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
5084                     break;
5085                 case 0x06f: /* VIS I fnands */
5086                     CHECK_FPU_FEATURE(dc, VIS1);
5087                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
5088                     break;
5089                 case 0x070: /* VIS I fand */
5090                     CHECK_FPU_FEATURE(dc, VIS1);
5091                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
5092                     break;
5093                 case 0x071: /* VIS I fands */
5094                     CHECK_FPU_FEATURE(dc, VIS1);
5095                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
5096                     break;
5097                 case 0x072: /* VIS I fxnor */
5098                     CHECK_FPU_FEATURE(dc, VIS1);
5099                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5100                     break;
5101                 case 0x073: /* VIS I fxnors */
5102                     CHECK_FPU_FEATURE(dc, VIS1);
5103                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5104                     break;
5105                 case 0x074: /* VIS I fsrc1 */
5106                     CHECK_FPU_FEATURE(dc, VIS1);
5107                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5108                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5109                     break;
5110                 case 0x075: /* VIS I fsrc1s */
5111                     CHECK_FPU_FEATURE(dc, VIS1);
5112                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5113                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5114                     break;
5115                 case 0x076: /* VIS I fornot2 */
5116                     CHECK_FPU_FEATURE(dc, VIS1);
5117                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5118                     break;
5119                 case 0x077: /* VIS I fornot2s */
5120                     CHECK_FPU_FEATURE(dc, VIS1);
5121                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5122                     break;
5123                 case 0x078: /* VIS I fsrc2 */
5124                     CHECK_FPU_FEATURE(dc, VIS1);
5125                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5126                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5127                     break;
5128                 case 0x079: /* VIS I fsrc2s */
5129                     CHECK_FPU_FEATURE(dc, VIS1);
5130                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5131                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5132                     break;
5133                 case 0x07a: /* VIS I fornot1 */
5134                     CHECK_FPU_FEATURE(dc, VIS1);
5135                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5136                     break;
5137                 case 0x07b: /* VIS I fornot1s */
5138                     CHECK_FPU_FEATURE(dc, VIS1);
5139                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5140                     break;
5141                 case 0x07c: /* VIS I for */
5142                     CHECK_FPU_FEATURE(dc, VIS1);
5143                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5144                     break;
5145                 case 0x07d: /* VIS I fors */
5146                     CHECK_FPU_FEATURE(dc, VIS1);
5147                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5148                     break;
5149                 case 0x07e: /* VIS I fone */
5150                     CHECK_FPU_FEATURE(dc, VIS1);
5151                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5152                     tcg_gen_movi_i64(cpu_dst_64, -1);
5153                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5154                     break;
5155                 case 0x07f: /* VIS I fones */
5156                     CHECK_FPU_FEATURE(dc, VIS1);
5157                     cpu_dst_32 = gen_dest_fpr_F(dc);
5158                     tcg_gen_movi_i32(cpu_dst_32, -1);
5159                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5160                     break;
5161                 case 0x080: /* VIS I shutdown */
5162                 case 0x081: /* VIS II siam */
5163                     // XXX
5164                     goto illegal_insn;
5165                 default:
5166                     goto illegal_insn;
5167                 }
5168 #else
5169                 goto ncp_insn;
5170 #endif
5171             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5172 #ifdef TARGET_SPARC64
5173                 goto illegal_insn;
5174 #else
5175                 goto ncp_insn;
5176 #endif
5177 #ifdef TARGET_SPARC64
5178             } else if (xop == 0x39) { /* V9 return */
5179                 save_state(dc);
5180                 cpu_src1 = get_src1(dc, insn);
5181                 cpu_tmp0 = get_temp_tl(dc);
5182                 if (IS_IMM) {   /* immediate */
5183                     simm = GET_FIELDs(insn, 19, 31);
5184                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5185                 } else {                /* register */
5186                     rs2 = GET_FIELD(insn, 27, 31);
5187                     if (rs2) {
5188                         cpu_src2 = gen_load_gpr(dc, rs2);
5189                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5190                     } else {
5191                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5192                     }
5193                 }
5194                 gen_helper_restore(cpu_env);
5195                 gen_mov_pc_npc(dc);
5196                 gen_check_align(cpu_tmp0, 3);
5197                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5198                 dc->npc = DYNAMIC_PC;
5199                 goto jmp_insn;
5200 #endif
5201             } else {
5202                 cpu_src1 = get_src1(dc, insn);
5203                 cpu_tmp0 = get_temp_tl(dc);
5204                 if (IS_IMM) {   /* immediate */
5205                     simm = GET_FIELDs(insn, 19, 31);
5206                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5207                 } else {                /* register */
5208                     rs2 = GET_FIELD(insn, 27, 31);
5209                     if (rs2) {
5210                         cpu_src2 = gen_load_gpr(dc, rs2);
5211                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5212                     } else {
5213                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5214                     }
5215                 }
5216                 switch (xop) {
5217                 case 0x38:      /* jmpl */
5218                     {
5219                         TCGv t = gen_dest_gpr(dc, rd);
5220                         tcg_gen_movi_tl(t, dc->pc);
5221                         gen_store_gpr(dc, rd, t);
5222 
5223                         gen_mov_pc_npc(dc);
5224                         gen_check_align(cpu_tmp0, 3);
5225                         gen_address_mask(dc, cpu_tmp0);
5226                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5227                         dc->npc = DYNAMIC_PC;
5228                     }
5229                     goto jmp_insn;
5230 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5231                 case 0x39:      /* rett, V9 return */
5232                     {
5233                         if (!supervisor(dc))
5234                             goto priv_insn;
5235                         gen_mov_pc_npc(dc);
5236                         gen_check_align(cpu_tmp0, 3);
5237                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5238                         dc->npc = DYNAMIC_PC;
5239                         gen_helper_rett(cpu_env);
5240                     }
5241                     goto jmp_insn;
5242 #endif
5243                 case 0x3b: /* flush */
5244                     if (!((dc)->def->features & CPU_FEATURE_FLUSH))
5245                         goto unimp_flush;
5246                     /* nop */
5247                     break;
5248                 case 0x3c:      /* save */
5249                     gen_helper_save(cpu_env);
5250                     gen_store_gpr(dc, rd, cpu_tmp0);
5251                     break;
5252                 case 0x3d:      /* restore */
5253                     gen_helper_restore(cpu_env);
5254                     gen_store_gpr(dc, rd, cpu_tmp0);
5255                     break;
5256 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5257                 case 0x3e:      /* V9 done/retry */
5258                     {
5259                         switch (rd) {
5260                         case 0:
5261                             if (!supervisor(dc))
5262                                 goto priv_insn;
5263                             dc->npc = DYNAMIC_PC;
5264                             dc->pc = DYNAMIC_PC;
5265                             gen_helper_done(cpu_env);
5266                             goto jmp_insn;
5267                         case 1:
5268                             if (!supervisor(dc))
5269                                 goto priv_insn;
5270                             dc->npc = DYNAMIC_PC;
5271                             dc->pc = DYNAMIC_PC;
5272                             gen_helper_retry(cpu_env);
5273                             goto jmp_insn;
5274                         default:
5275                             goto illegal_insn;
5276                         }
5277                     }
5278                     break;
5279 #endif
5280                 default:
5281                     goto illegal_insn;
5282                 }
5283             }
5284             break;
5285         }
5286         break;
5287     case 3:                     /* load/store instructions */
5288         {
5289             unsigned int xop = GET_FIELD(insn, 7, 12);
5290             /* ??? gen_address_mask prevents us from using a source
5291                register directly.  Always generate a temporary.  */
5292             TCGv cpu_addr = get_temp_tl(dc);
5293 
5294             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5295             if (xop == 0x3c || xop == 0x3e) {
5296                 /* V9 casa/casxa : no offset */
5297             } else if (IS_IMM) {     /* immediate */
5298                 simm = GET_FIELDs(insn, 19, 31);
5299                 if (simm != 0) {
5300                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5301                 }
5302             } else {            /* register */
5303                 rs2 = GET_FIELD(insn, 27, 31);
5304                 if (rs2 != 0) {
5305                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5306                 }
5307             }
5308             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5309                 (xop > 0x17 && xop <= 0x1d ) ||
5310                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5311                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5312 
5313                 switch (xop) {
5314                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5315                     gen_address_mask(dc, cpu_addr);
5316                     tcg_gen_qemu_ld32u(cpu_val, cpu_addr, dc->mem_idx);
5317                     break;
5318                 case 0x1:       /* ldub, load unsigned byte */
5319                     gen_address_mask(dc, cpu_addr);
5320                     tcg_gen_qemu_ld8u(cpu_val, cpu_addr, dc->mem_idx);
5321                     break;
5322                 case 0x2:       /* lduh, load unsigned halfword */
5323                     gen_address_mask(dc, cpu_addr);
5324                     tcg_gen_qemu_ld16u(cpu_val, cpu_addr, dc->mem_idx);
5325                     break;
5326                 case 0x3:       /* ldd, load double word */
5327                     if (rd & 1)
5328                         goto illegal_insn;
5329                     else {
5330                         TCGv_i64 t64;
5331 
5332                         gen_address_mask(dc, cpu_addr);
5333                         t64 = tcg_temp_new_i64();
5334                         tcg_gen_qemu_ld64(t64, cpu_addr, dc->mem_idx);
5335                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5336                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5337                         gen_store_gpr(dc, rd + 1, cpu_val);
5338                         tcg_gen_shri_i64(t64, t64, 32);
5339                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5340                         tcg_temp_free_i64(t64);
5341                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5342                     }
5343                     break;
5344                 case 0x9:       /* ldsb, load signed byte */
5345                     gen_address_mask(dc, cpu_addr);
5346                     tcg_gen_qemu_ld8s(cpu_val, cpu_addr, dc->mem_idx);
5347                     break;
5348                 case 0xa:       /* ldsh, load signed halfword */
5349                     gen_address_mask(dc, cpu_addr);
5350                     tcg_gen_qemu_ld16s(cpu_val, cpu_addr, dc->mem_idx);
5351                     break;
5352                 case 0xd:       /* ldstub */
5353                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5354                     break;
5355                 case 0x0f:
5356                     /* swap, swap register with memory. Also atomically */
5357                     CHECK_IU_FEATURE(dc, SWAP);
5358                     cpu_src1 = gen_load_gpr(dc, rd);
5359                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5360                              dc->mem_idx, MO_TEUL);
5361                     break;
5362 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5363                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5364                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5365                     break;
5366                 case 0x11:      /* lduba, load unsigned byte alternate */
5367                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5368                     break;
5369                 case 0x12:      /* lduha, load unsigned halfword alternate */
5370                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5371                     break;
5372                 case 0x13:      /* ldda, load double word alternate */
5373                     if (rd & 1) {
5374                         goto illegal_insn;
5375                     }
5376                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5377                     goto skip_move;
5378                 case 0x19:      /* ldsba, load signed byte alternate */
5379                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5380                     break;
5381                 case 0x1a:      /* ldsha, load signed halfword alternate */
5382                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5383                     break;
5384                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5385                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5386                     break;
5387                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5388                                    atomically */
5389                     CHECK_IU_FEATURE(dc, SWAP);
5390                     cpu_src1 = gen_load_gpr(dc, rd);
5391                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5392                     break;
5393 
5394 #ifndef TARGET_SPARC64
5395                 case 0x30: /* ldc */
5396                 case 0x31: /* ldcsr */
5397                 case 0x33: /* lddc */
5398                     goto ncp_insn;
5399 #endif
5400 #endif
5401 #ifdef TARGET_SPARC64
5402                 case 0x08: /* V9 ldsw */
5403                     gen_address_mask(dc, cpu_addr);
5404                     tcg_gen_qemu_ld32s(cpu_val, cpu_addr, dc->mem_idx);
5405                     break;
5406                 case 0x0b: /* V9 ldx */
5407                     gen_address_mask(dc, cpu_addr);
5408                     tcg_gen_qemu_ld64(cpu_val, cpu_addr, dc->mem_idx);
5409                     break;
5410                 case 0x18: /* V9 ldswa */
5411                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5412                     break;
5413                 case 0x1b: /* V9 ldxa */
5414                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5415                     break;
5416                 case 0x2d: /* V9 prefetch, no effect */
5417                     goto skip_move;
5418                 case 0x30: /* V9 ldfa */
5419                     if (gen_trap_ifnofpu(dc)) {
5420                         goto jmp_insn;
5421                     }
5422                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5423                     gen_update_fprs_dirty(dc, rd);
5424                     goto skip_move;
5425                 case 0x33: /* V9 lddfa */
5426                     if (gen_trap_ifnofpu(dc)) {
5427                         goto jmp_insn;
5428                     }
5429                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5430                     gen_update_fprs_dirty(dc, DFPREG(rd));
5431                     goto skip_move;
5432                 case 0x3d: /* V9 prefetcha, no effect */
5433                     goto skip_move;
5434                 case 0x32: /* V9 ldqfa */
5435                     CHECK_FPU_FEATURE(dc, FLOAT128);
5436                     if (gen_trap_ifnofpu(dc)) {
5437                         goto jmp_insn;
5438                     }
5439                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5440                     gen_update_fprs_dirty(dc, QFPREG(rd));
5441                     goto skip_move;
5442 #endif
5443                 default:
5444                     goto illegal_insn;
5445                 }
5446                 gen_store_gpr(dc, rd, cpu_val);
5447 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5448             skip_move: ;
5449 #endif
5450             } else if (xop >= 0x20 && xop < 0x24) {
5451                 if (gen_trap_ifnofpu(dc)) {
5452                     goto jmp_insn;
5453                 }
5454                 switch (xop) {
5455                 case 0x20:      /* ldf, load fpreg */
5456                     gen_address_mask(dc, cpu_addr);
5457                     cpu_dst_32 = gen_dest_fpr_F(dc);
5458                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5459                                         dc->mem_idx, MO_TEUL);
5460                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5461                     break;
5462                 case 0x21:      /* ldfsr, V9 ldxfsr */
5463 #ifdef TARGET_SPARC64
5464                     gen_address_mask(dc, cpu_addr);
5465                     if (rd == 1) {
5466                         TCGv_i64 t64 = tcg_temp_new_i64();
5467                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5468                                             dc->mem_idx, MO_TEQ);
5469                         gen_helper_ldxfsr(cpu_fsr, cpu_env, cpu_fsr, t64);
5470                         tcg_temp_free_i64(t64);
5471                         break;
5472                     }
5473 #endif
5474                     cpu_dst_32 = get_temp_i32(dc);
5475                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5476                                         dc->mem_idx, MO_TEUL);
5477                     gen_helper_ldfsr(cpu_fsr, cpu_env, cpu_fsr, cpu_dst_32);
5478                     break;
5479                 case 0x22:      /* ldqf, load quad fpreg */
5480                     CHECK_FPU_FEATURE(dc, FLOAT128);
5481                     gen_address_mask(dc, cpu_addr);
5482                     cpu_src1_64 = tcg_temp_new_i64();
5483                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5484                                         MO_TEQ | MO_ALIGN_4);
5485                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5486                     cpu_src2_64 = tcg_temp_new_i64();
5487                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5488                                         MO_TEQ | MO_ALIGN_4);
5489                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5490                     tcg_temp_free_i64(cpu_src1_64);
5491                     tcg_temp_free_i64(cpu_src2_64);
5492                     break;
5493                 case 0x23:      /* lddf, load double fpreg */
5494                     gen_address_mask(dc, cpu_addr);
5495                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5496                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5497                                         MO_TEQ | MO_ALIGN_4);
5498                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5499                     break;
5500                 default:
5501                     goto illegal_insn;
5502                 }
5503             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5504                        xop == 0xe || xop == 0x1e) {
5505                 TCGv cpu_val = gen_load_gpr(dc, rd);
5506 
5507                 switch (xop) {
5508                 case 0x4: /* st, store word */
5509                     gen_address_mask(dc, cpu_addr);
5510                     tcg_gen_qemu_st32(cpu_val, cpu_addr, dc->mem_idx);
5511                     break;
5512                 case 0x5: /* stb, store byte */
5513                     gen_address_mask(dc, cpu_addr);
5514                     tcg_gen_qemu_st8(cpu_val, cpu_addr, dc->mem_idx);
5515                     break;
5516                 case 0x6: /* sth, store halfword */
5517                     gen_address_mask(dc, cpu_addr);
5518                     tcg_gen_qemu_st16(cpu_val, cpu_addr, dc->mem_idx);
5519                     break;
5520                 case 0x7: /* std, store double word */
5521                     if (rd & 1)
5522                         goto illegal_insn;
5523                     else {
5524                         TCGv_i64 t64;
5525                         TCGv lo;
5526 
5527                         gen_address_mask(dc, cpu_addr);
5528                         lo = gen_load_gpr(dc, rd + 1);
5529                         t64 = tcg_temp_new_i64();
5530                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5531                         tcg_gen_qemu_st64(t64, cpu_addr, dc->mem_idx);
5532                         tcg_temp_free_i64(t64);
5533                     }
5534                     break;
5535 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5536                 case 0x14: /* sta, V9 stwa, store word alternate */
5537                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5538                     break;
5539                 case 0x15: /* stba, store byte alternate */
5540                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5541                     break;
5542                 case 0x16: /* stha, store halfword alternate */
5543                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5544                     break;
5545                 case 0x17: /* stda, store double word alternate */
5546                     if (rd & 1) {
5547                         goto illegal_insn;
5548                     }
5549                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5550                     break;
5551 #endif
5552 #ifdef TARGET_SPARC64
5553                 case 0x0e: /* V9 stx */
5554                     gen_address_mask(dc, cpu_addr);
5555                     tcg_gen_qemu_st64(cpu_val, cpu_addr, dc->mem_idx);
5556                     break;
5557                 case 0x1e: /* V9 stxa */
5558                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEQ);
5559                     break;
5560 #endif
5561                 default:
5562                     goto illegal_insn;
5563                 }
5564             } else if (xop > 0x23 && xop < 0x28) {
5565                 if (gen_trap_ifnofpu(dc)) {
5566                     goto jmp_insn;
5567                 }
5568                 switch (xop) {
5569                 case 0x24: /* stf, store fpreg */
5570                     gen_address_mask(dc, cpu_addr);
5571                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5572                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5573                                         dc->mem_idx, MO_TEUL);
5574                     break;
5575                 case 0x25: /* stfsr, V9 stxfsr */
5576                     {
5577 #ifdef TARGET_SPARC64
5578                         gen_address_mask(dc, cpu_addr);
5579                         if (rd == 1) {
5580                             tcg_gen_qemu_st64(cpu_fsr, cpu_addr, dc->mem_idx);
5581                             break;
5582                         }
5583 #endif
5584                         tcg_gen_qemu_st32(cpu_fsr, cpu_addr, dc->mem_idx);
5585                     }
5586                     break;
5587                 case 0x26:
5588 #ifdef TARGET_SPARC64
5589                     /* V9 stqf, store quad fpreg */
5590                     CHECK_FPU_FEATURE(dc, FLOAT128);
5591                     gen_address_mask(dc, cpu_addr);
5592                     /* ??? While stqf only requires 4-byte alignment, it is
5593                        legal for the cpu to signal the unaligned exception.
5594                        The OS trap handler is then required to fix it up.
5595                        For qemu, this avoids having to probe the second page
5596                        before performing the first write.  */
5597                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5598                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5599                                         dc->mem_idx, MO_TEQ | MO_ALIGN_16);
5600                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5601                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5602                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5603                                         dc->mem_idx, MO_TEQ);
5604                     break;
5605 #else /* !TARGET_SPARC64 */
5606                     /* stdfq, store floating point queue */
5607 #if defined(CONFIG_USER_ONLY)
5608                     goto illegal_insn;
5609 #else
5610                     if (!supervisor(dc))
5611                         goto priv_insn;
5612                     if (gen_trap_ifnofpu(dc)) {
5613                         goto jmp_insn;
5614                     }
5615                     goto nfq_insn;
5616 #endif
5617 #endif
5618                 case 0x27: /* stdf, store double fpreg */
5619                     gen_address_mask(dc, cpu_addr);
5620                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5621                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5622                                         MO_TEQ | MO_ALIGN_4);
5623                     break;
5624                 default:
5625                     goto illegal_insn;
5626                 }
5627             } else if (xop > 0x33 && xop < 0x3f) {
5628                 switch (xop) {
5629 #ifdef TARGET_SPARC64
5630                 case 0x34: /* V9 stfa */
5631                     if (gen_trap_ifnofpu(dc)) {
5632                         goto jmp_insn;
5633                     }
5634                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5635                     break;
5636                 case 0x36: /* V9 stqfa */
5637                     {
5638                         CHECK_FPU_FEATURE(dc, FLOAT128);
5639                         if (gen_trap_ifnofpu(dc)) {
5640                             goto jmp_insn;
5641                         }
5642                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5643                     }
5644                     break;
5645                 case 0x37: /* V9 stdfa */
5646                     if (gen_trap_ifnofpu(dc)) {
5647                         goto jmp_insn;
5648                     }
5649                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5650                     break;
5651                 case 0x3e: /* V9 casxa */
5652                     rs2 = GET_FIELD(insn, 27, 31);
5653                     cpu_src2 = gen_load_gpr(dc, rs2);
5654                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5655                     break;
5656 #else
5657                 case 0x34: /* stc */
5658                 case 0x35: /* stcsr */
5659                 case 0x36: /* stdcq */
5660                 case 0x37: /* stdc */
5661                     goto ncp_insn;
5662 #endif
5663 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5664                 case 0x3c: /* V9 or LEON3 casa */
5665 #ifndef TARGET_SPARC64
5666                     CHECK_IU_FEATURE(dc, CASA);
5667 #endif
5668                     rs2 = GET_FIELD(insn, 27, 31);
5669                     cpu_src2 = gen_load_gpr(dc, rs2);
5670                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5671                     break;
5672 #endif
5673                 default:
5674                     goto illegal_insn;
5675                 }
5676             } else {
5677                 goto illegal_insn;
5678             }
5679         }
5680         break;
5681     }
5682     /* default case for non jump instructions */
5683     if (dc->npc == DYNAMIC_PC) {
5684         dc->pc = DYNAMIC_PC;
5685         gen_op_next_insn();
5686     } else if (dc->npc == JUMP_PC) {
5687         /* we can do a static jump */
5688         gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
5689         dc->is_br = 1;
5690     } else {
5691         dc->pc = dc->npc;
5692         dc->npc = dc->npc + 4;
5693     }
5694  jmp_insn:
5695     goto egress;
5696  illegal_insn:
5697     gen_exception(dc, TT_ILL_INSN);
5698     goto egress;
5699  unimp_flush:
5700     gen_exception(dc, TT_UNIMP_FLUSH);
5701     goto egress;
5702 #if !defined(CONFIG_USER_ONLY)
5703  priv_insn:
5704     gen_exception(dc, TT_PRIV_INSN);
5705     goto egress;
5706 #endif
5707  nfpu_insn:
5708     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5709     goto egress;
5710 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5711  nfq_insn:
5712     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5713     goto egress;
5714 #endif
5715 #ifndef TARGET_SPARC64
5716  ncp_insn:
5717     gen_exception(dc, TT_NCP_INSN);
5718     goto egress;
5719 #endif
5720  egress:
5721     if (dc->n_t32 != 0) {
5722         int i;
5723         for (i = dc->n_t32 - 1; i >= 0; --i) {
5724             tcg_temp_free_i32(dc->t32[i]);
5725         }
5726         dc->n_t32 = 0;
5727     }
5728     if (dc->n_ttl != 0) {
5729         int i;
5730         for (i = dc->n_ttl - 1; i >= 0; --i) {
5731             tcg_temp_free(dc->ttl[i]);
5732         }
5733         dc->n_ttl = 0;
5734     }
5735 }
5736 
5737 void gen_intermediate_code(CPUState *cs, TranslationBlock * tb)
5738 {
5739     CPUSPARCState *env = cs->env_ptr;
5740     target_ulong pc_start, last_pc;
5741     DisasContext dc1, *dc = &dc1;
5742     int num_insns;
5743     int max_insns;
5744     unsigned int insn;
5745 
5746     memset(dc, 0, sizeof(DisasContext));
5747     dc->tb = tb;
5748     pc_start = tb->pc;
5749     dc->pc = pc_start;
5750     last_pc = dc->pc;
5751     dc->npc = (target_ulong) tb->cs_base;
5752     dc->cc_op = CC_OP_DYNAMIC;
5753     dc->mem_idx = tb->flags & TB_FLAG_MMU_MASK;
5754     dc->def = &env->def;
5755     dc->fpu_enabled = tb_fpu_enabled(tb->flags);
5756     dc->address_mask_32bit = tb_am_enabled(tb->flags);
5757     dc->singlestep = (cs->singlestep_enabled || singlestep);
5758 #ifndef CONFIG_USER_ONLY
5759     dc->supervisor = (tb->flags & TB_FLAG_SUPER) != 0;
5760 #endif
5761 #ifdef TARGET_SPARC64
5762     dc->fprs_dirty = 0;
5763     dc->asi = (tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5764 #ifndef CONFIG_USER_ONLY
5765     dc->hypervisor = (tb->flags & TB_FLAG_HYPER) != 0;
5766 #endif
5767 #endif
5768 
5769     num_insns = 0;
5770     max_insns = tb_cflags(tb) & CF_COUNT_MASK;
5771     if (max_insns == 0) {
5772         max_insns = CF_COUNT_MASK;
5773     }
5774     if (max_insns > TCG_MAX_INSNS) {
5775         max_insns = TCG_MAX_INSNS;
5776     }
5777 
5778     gen_tb_start(tb);
5779     do {
5780         if (dc->npc & JUMP_PC) {
5781             assert(dc->jump_pc[1] == dc->pc + 4);
5782             tcg_gen_insn_start(dc->pc, dc->jump_pc[0] | JUMP_PC);
5783         } else {
5784             tcg_gen_insn_start(dc->pc, dc->npc);
5785         }
5786         num_insns++;
5787         last_pc = dc->pc;
5788 
5789         if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
5790             if (dc->pc != pc_start) {
5791                 save_state(dc);
5792             }
5793             gen_helper_debug(cpu_env);
5794             tcg_gen_exit_tb(0);
5795             dc->is_br = 1;
5796             goto exit_gen_loop;
5797         }
5798 
5799         if (num_insns == max_insns && (tb_cflags(tb) & CF_LAST_IO)) {
5800             gen_io_start();
5801         }
5802 
5803         insn = cpu_ldl_code(env, dc->pc);
5804 
5805         disas_sparc_insn(dc, insn);
5806 
5807         if (dc->is_br)
5808             break;
5809         /* if the next PC is different, we abort now */
5810         if (dc->pc != (last_pc + 4))
5811             break;
5812         /* if we reach a page boundary, we stop generation so that the
5813            PC of a TT_TFAULT exception is always in the right page */
5814         if ((dc->pc & (TARGET_PAGE_SIZE - 1)) == 0)
5815             break;
5816         /* if single step mode, we generate only one instruction and
5817            generate an exception */
5818         if (dc->singlestep) {
5819             break;
5820         }
5821     } while (!tcg_op_buf_full() &&
5822              (dc->pc - pc_start) < (TARGET_PAGE_SIZE - 32) &&
5823              num_insns < max_insns);
5824 
5825  exit_gen_loop:
5826     if (tb_cflags(tb) & CF_LAST_IO) {
5827         gen_io_end();
5828     }
5829     if (!dc->is_br) {
5830         if (dc->pc != DYNAMIC_PC &&
5831             (dc->npc != DYNAMIC_PC && dc->npc != JUMP_PC)) {
5832             /* static PC and NPC: we can use direct chaining */
5833             gen_goto_tb(dc, 0, dc->pc, dc->npc);
5834         } else {
5835             if (dc->pc != DYNAMIC_PC) {
5836                 tcg_gen_movi_tl(cpu_pc, dc->pc);
5837             }
5838             save_npc(dc);
5839             tcg_gen_exit_tb(0);
5840         }
5841     }
5842     gen_tb_end(tb, num_insns);
5843 
5844     tb->size = last_pc + 4 - pc_start;
5845     tb->icount = num_insns;
5846 
5847 #ifdef DEBUG_DISAS
5848     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
5849         && qemu_log_in_addr_range(pc_start)) {
5850         qemu_log_lock();
5851         qemu_log("--------------\n");
5852         qemu_log("IN: %s\n", lookup_symbol(pc_start));
5853         log_target_disas(cs, pc_start, last_pc + 4 - pc_start, 0);
5854         qemu_log("\n");
5855         qemu_log_unlock();
5856     }
5857 #endif
5858 }
5859 
5860 void sparc_tcg_init(void)
5861 {
5862     static const char gregnames[32][4] = {
5863         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5864         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5865         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5866         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5867     };
5868     static const char fregnames[32][4] = {
5869         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5870         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5871         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5872         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5873     };
5874 
5875     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5876 #ifdef TARGET_SPARC64
5877         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5878         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5879 #else
5880         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5881 #endif
5882         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5883         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5884     };
5885 
5886     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5887 #ifdef TARGET_SPARC64
5888         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5889         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5890         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5891         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5892           "hstick_cmpr" },
5893         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5894         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5895         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5896         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5897         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5898 #endif
5899         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5900         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5901         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5902         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5903         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5904         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5905         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5906         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5907 #ifndef CONFIG_USER_ONLY
5908         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5909 #endif
5910     };
5911 
5912     unsigned int i;
5913 
5914     cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
5915     tcg_ctx->tcg_env = cpu_env;
5916 
5917     cpu_regwptr = tcg_global_mem_new_ptr(cpu_env,
5918                                          offsetof(CPUSPARCState, regwptr),
5919                                          "regwptr");
5920 
5921     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5922         *r32[i].ptr = tcg_global_mem_new_i32(cpu_env, r32[i].off, r32[i].name);
5923     }
5924 
5925     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5926         *rtl[i].ptr = tcg_global_mem_new(cpu_env, rtl[i].off, rtl[i].name);
5927     }
5928 
5929     TCGV_UNUSED(cpu_regs[0]);
5930     for (i = 1; i < 8; ++i) {
5931         cpu_regs[i] = tcg_global_mem_new(cpu_env,
5932                                          offsetof(CPUSPARCState, gregs[i]),
5933                                          gregnames[i]);
5934     }
5935 
5936     for (i = 8; i < 32; ++i) {
5937         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5938                                          (i - 8) * sizeof(target_ulong),
5939                                          gregnames[i]);
5940     }
5941 
5942     for (i = 0; i < TARGET_DPREGS; i++) {
5943         cpu_fpr[i] = tcg_global_mem_new_i64(cpu_env,
5944                                             offsetof(CPUSPARCState, fpr[i]),
5945                                             fregnames[i]);
5946     }
5947 }
5948 
5949 void restore_state_to_opc(CPUSPARCState *env, TranslationBlock *tb,
5950                           target_ulong *data)
5951 {
5952     target_ulong pc = data[0];
5953     target_ulong npc = data[1];
5954 
5955     env->pc = pc;
5956     if (npc == DYNAMIC_PC) {
5957         /* dynamic NPC: already stored */
5958     } else if (npc & JUMP_PC) {
5959         /* jump PC: use 'cond' and the jump targets of the translation */
5960         if (env->cond) {
5961             env->npc = npc & ~3;
5962         } else {
5963             env->npc = pc + 4;
5964         }
5965     } else {
5966         env->npc = npc;
5967     }
5968 }
5969