xref: /openbmc/qemu/target/sparc/translate.c (revision b1bc09ea6bce116c679bbffa66edcb7520d57424)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
/* Dynamic PC, must exit to main loop. */
#define DYNAMIC_PC         1
/* Dynamic PC, one of two values according to jump_pc[T2]. */
#define JUMP_PC            2
/* Dynamic PC, may lookup next TB. */
#define DYNAMIC_PC_LOOKUP  3

#define DISAS_EXIT  DISAS_TARGET_0

/* global register indexes */
static TCGv_ptr cpu_regwptr;
/* Condition-code operand/result cache and cc operation selector. */
static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
static TCGv_i32 cpu_cc_op;
static TCGv_i32 cpu_psr;
static TCGv cpu_fsr, cpu_pc, cpu_npc;
/* Current window view of the 32 general-purpose registers. */
static TCGv cpu_regs[32];
static TCGv cpu_y;
#ifndef CONFIG_USER_ONLY
static TCGv cpu_tbr;
#endif
/* Branch condition value, consumed by gen_generic_branch/gen_branch_a. */
static TCGv cpu_cond;
#ifdef TARGET_SPARC64
static TCGv_i32 cpu_xcc, cpu_fprs;
static TCGv cpu_gsr;
static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
#else
static TCGv cpu_wim;
#endif
/* Floating point registers, stored as i64 pairs of 32-bit registers. */
static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
/*
 * A deferred exception raise: code in the TB branches to 'lab' and an
 * epilogue (outside this view) raises 'excp' with the saved pc/npc.
 * Nodes are chained through dc->delay_excp_list (see delay_exceptionv).
 */
typedef struct DisasDelayException {
    struct DisasDelayException *next;  /* link in dc->delay_excp_list */
    TCGLabel *lab;                     /* branch target of the raise path */
    TCGv_i32 excp;                     /* exception number to raise */
    /* Saved state at parent insn. */
    target_ulong pc;
    target_ulong npc;
} DisasDelayException;
79 
/* Per-translation-block state carried across instruction decode. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;             /* MMU index for memory accesses */
    bool fpu_enabled;        /* FPU access allowed in this context */
    bool address_mask_32bit; /* truncate addresses to 32 bits (AM_CHECK) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;         /* NOTE(review): presumably ASI of current insn — set outside this view */
#endif
    DisasDelayException *delay_excp_list; /* pending deferred exceptions */
} DisasContext;
103 
/* A comparison to be emitted: 'cond' applied to operands c1, c2.
   When is_bool, c1 already holds a 0/1 truth value. */
typedef struct {
    TCGCond cond;
    bool is_bool;
    TCGv c1, c2;
} DisasCompare;
109 
// This function uses non-native bit order
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the extractors above. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map the instruction encoding of a double/quad FP register to its number:
   on sparc64, encoded bit 0 supplies bit 5 of the register number. */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low 'len' bits of 'x' to a full int.
 * The previous implementation shifted a (possibly negative) int left,
 * which is undefined behavior in C; do the extension with unsigned
 * arithmetic instead (mask, then flip-and-subtract the sign bit).
 */
static int sign_extend(int x, int len)
{
    unsigned int ux, sign;

    if (len <= 0 || len >= 32) {
        /* Full word (or degenerate width): nothing to extend. */
        return x;
    }
    ux = (unsigned int)x & ((1u << len) - 1);
    sign = 1u << (len - 1);
    return (int)((ux ^ sign) - sign);
}
137 
/* True when bit 13 (the 'i' immediate flag) is set in the insn word. */
#define IS_IMM (insn & (1<<13))
139 
140 static void gen_update_fprs_dirty(DisasContext *dc, int rd)
141 {
142 #if defined(TARGET_SPARC64)
143     int bit = (rd < 32) ? 1 : 2;
144     /* If we know we've already set this bit within the TB,
145        we can avoid setting it again.  */
146     if (!(dc->fprs_dirty & bit)) {
147         dc->fprs_dirty |= bit;
148         tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
149     }
150 #endif
151 }
152 
153 /* floating point registers moves */
154 static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
155 {
156     TCGv_i32 ret = tcg_temp_new_i32();
157     if (src & 1) {
158         tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
159     } else {
160         tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
161     }
162     return ret;
163 }
164 
/* Store i32 'v' into single-precision FP register 'dst', preserving the
   sibling half of the i64 slot, and mark FPRS dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    /* Even registers occupy bits 63:32 of the slot, odd bits 31:0. */
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
174 
/* Scratch i32 destination for a single-precision result; the caller
   commits it with gen_store_fpr_F. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
179 
180 static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
181 {
182     src = DFPREG(src);
183     return cpu_fpr[src / 2];
184 }
185 
186 static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
187 {
188     dst = DFPREG(dst);
189     tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
190     gen_update_fprs_dirty(dc, dst);
191 }
192 
/* Destination slot for a double-precision result; caller must follow up
   with gen_update_fprs_dirty (or use gen_store_fpr_D instead). */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
/* Copy quad FP register 'src' (two i64 slots) into env scratch qt0. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
205 
/* Copy quad FP register 'src' (two i64 slots) into env scratch qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
213 
/* Copy env scratch qt0 back into quad FP register 'dst'.
   Caller is responsible for gen_update_fprs_dirty. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
221 
222 static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
223                             TCGv_i64 v1, TCGv_i64 v2)
224 {
225     dst = QFPREG(dst);
226 
227     tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
228     tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
229     gen_update_fprs_dirty(dc, dst);
230 }
231 
232 #ifdef TARGET_SPARC64
233 static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
234 {
235     src = QFPREG(src);
236     return cpu_fpr[src / 2];
237 }
238 
239 static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
240 {
241     src = QFPREG(src);
242     return cpu_fpr[src / 2 + 1];
243 }
244 
245 static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
246 {
247     rd = QFPREG(rd);
248     rs = QFPREG(rs);
249 
250     tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
251     tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
252     gen_update_fprs_dirty(dc, rd);
253 }
254 #endif
255 
/* moves */
/* Privilege predicates; user-only builds have no privileged state. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* Whether addresses must be truncated to 32 bits for this context. */
#if !defined(TARGET_SPARC64)
# define AM_CHECK(dc)  false
#elif defined(TARGET_ABI32)
# define AM_CHECK(dc)  true
#elif defined(CONFIG_USER_ONLY)
# define AM_CHECK(dc)  false
#else
# define AM_CHECK(dc)  ((dc)->address_mask_32bit)
#endif
280 
281 static void gen_address_mask(DisasContext *dc, TCGv addr)
282 {
283     if (AM_CHECK(dc)) {
284         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
285     }
286 }
287 
288 static TCGv gen_load_gpr(DisasContext *dc, int reg)
289 {
290     if (reg > 0) {
291         assert(reg < 32);
292         return cpu_regs[reg];
293     } else {
294         TCGv t = tcg_temp_new();
295         tcg_gen_movi_tl(t, 0);
296         return t;
297     }
298 }
299 
300 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
301 {
302     if (reg > 0) {
303         assert(reg < 32);
304         tcg_gen_mov_tl(cpu_regs[reg], v);
305     }
306 }
307 
308 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
309 {
310     if (reg > 0) {
311         assert(reg < 32);
312         return cpu_regs[reg];
313     } else {
314         return tcg_temp_new();
315     }
316 }
317 
318 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
319 {
320     return translator_use_goto_tb(&s->base, pc) &&
321            translator_use_goto_tb(&s->base, npc);
322 }
323 
324 static void gen_goto_tb(DisasContext *s, int tb_num,
325                         target_ulong pc, target_ulong npc)
326 {
327     if (use_goto_tb(s, pc, npc))  {
328         /* jump to same page: we can use a direct jump */
329         tcg_gen_goto_tb(tb_num);
330         tcg_gen_movi_tl(cpu_pc, pc);
331         tcg_gen_movi_tl(cpu_npc, npc);
332         tcg_gen_exit_tb(s->base.tb, tb_num);
333     } else {
334         /* jump to another page: we can use an indirect jump */
335         tcg_gen_movi_tl(cpu_pc, pc);
336         tcg_gen_movi_tl(cpu_npc, npc);
337         tcg_gen_lookup_and_goto_ptr();
338     }
339 }
340 
// XXX suboptimal
/* Extract a single PSR flag (N/Z/V/C) from the i32 'src' into target-width
   'reg' as a 0/1 value. */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}

static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}

static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}

static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
365 
/* dst = src1 + src2, leaving operands and result in cpu_cc_* so the
   condition codes can be computed lazily later. */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
373 
/* Recover the 32-bit carry out of a previous add recorded in cpu_cc_*:
   carry = (uint32_t)cc_dst < (uint32_t)cc_src. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    /* On 32-bit targets the cc globals are already i32. */
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}

/* Recover the 32-bit borrow of a previous subtract recorded in cpu_cc_*:
   carry = (uint32_t)cc_src < (uint32_t)cc_src2. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
415 
/* ADDX/ADDXcc: dst = src1 + src2 + C, with the carry C reconstructed
   from the lazily-tracked cc state (dc->cc_op).  If update_cc, leave
   operands/result in cpu_cc_* and switch to CC_OP_ADDX. */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the i32 carry to the 64-bit target width. */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
480 
/* dst = src1 - src2, leaving operands and result in cpu_cc_* so the
   condition codes can be computed lazily later. */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
488 
/* SUBX/SUBXcc: dst = src1 - src2 - C, with the borrow C reconstructed
   from the lazily-tracked cc state (dc->cc_op).  If update_cc, leave
   operands/result in cpu_cc_* and switch to CC_OP_SUBX. */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    /* Widen the i32 borrow to the 64-bit target width. */
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
553 
/* MULScc (V8 multiply step): condition src2 on Y bit 0, shift the
   multiplier bit into %y, fold N^V into the shifted src1, and add.
   Operands/result are left in cpu_cc_* for lazy cc computation. */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
592 
/* 32x32 multiply: dst receives the (target-width) product and %y the
   high 32 bits; sign_ext selects signed vs unsigned extension of the
   truncated 32-bit operands. */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    /* Full 64-bit product in dst; %y mirrors the high word. */
    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
617 
/* UMUL: unsigned 32x32 multiply via gen_op_multiply. */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}

/* SMUL: signed 32x32 multiply via gen_op_multiply. */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
629 
/* Integer condition evaluators: set dst to 0/1 from the PSR-format
   flags in 'src'. */

// 1
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}

// Z
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
641 
642 // Z | (N ^ V)
643 static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
644 {
645     TCGv t0 = tcg_temp_new();
646     gen_mov_reg_N(t0, src);
647     gen_mov_reg_V(dst, src);
648     tcg_gen_xor_tl(dst, dst, t0);
649     gen_mov_reg_Z(t0, src);
650     tcg_gen_or_tl(dst, dst, t0);
651 }
652 
653 // N ^ V
654 static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
655 {
656     TCGv t0 = tcg_temp_new();
657     gen_mov_reg_V(t0, src);
658     gen_mov_reg_N(dst, src);
659     tcg_gen_xor_tl(dst, dst, t0);
660 }
661 
662 // C | Z
663 static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
664 {
665     TCGv t0 = tcg_temp_new();
666     gen_mov_reg_Z(t0, src);
667     gen_mov_reg_C(dst, src);
668     tcg_gen_or_tl(dst, dst, t0);
669 }
670 
/* Single-flag conditions. */

// C
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}

// V
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}

// 0
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}

// N
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
694 
/* Inverted conditions: compute the positive form, then flip bit 0. */

// !Z
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(Z | (N ^ V))
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(N ^ V)
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !(C | Z)
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !C
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !N
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !V
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
743 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the condition-code set selected by fcc_offset
   from the FSR value in 'src', as a 0/1 value. */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}

/* As above, for FCC1. */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
763 
764 // !0: FCC0 | FCC1
765 static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
766 {
767     TCGv t0 = tcg_temp_new();
768     gen_mov_reg_FCC0(dst, src, fcc_offset);
769     gen_mov_reg_FCC1(t0, src, fcc_offset);
770     tcg_gen_or_tl(dst, dst, t0);
771 }
772 
773 // 1 or 2: FCC0 ^ FCC1
774 static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
775 {
776     TCGv t0 = tcg_temp_new();
777     gen_mov_reg_FCC0(dst, src, fcc_offset);
778     gen_mov_reg_FCC1(t0, src, fcc_offset);
779     tcg_gen_xor_tl(dst, dst, t0);
780 }
781 
// 1 or 3: FCC0
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}

// 1: FCC0 & !FCC1
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    /* andc: dst = FCC0 & ~FCC1 */
    tcg_gen_andc_tl(dst, dst, t0);
}

// 2 or 3: FCC1
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}

// 2: !FCC0 & FCC1
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    /* andc with swapped operands: dst = FCC1 & ~FCC0 */
    tcg_gen_andc_tl(dst, t0, dst);
}
811 
812 // 3: FCC0 & FCC1
813 static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
814 {
815     TCGv t0 = tcg_temp_new();
816     gen_mov_reg_FCC0(dst, src, fcc_offset);
817     gen_mov_reg_FCC1(t0, src, fcc_offset);
818     tcg_gen_and_tl(dst, dst, t0);
819 }
820 
821 // 0: !(FCC0 | FCC1)
822 static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
823 {
824     TCGv t0 = tcg_temp_new();
825     gen_mov_reg_FCC0(dst, src, fcc_offset);
826     gen_mov_reg_FCC1(t0, src, fcc_offset);
827     tcg_gen_or_tl(dst, dst, t0);
828     tcg_gen_xori_tl(dst, dst, 0x1);
829 }
830 
831 // 0 or 3: !(FCC0 ^ FCC1)
832 static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
833 {
834     TCGv t0 = tcg_temp_new();
835     gen_mov_reg_FCC0(dst, src, fcc_offset);
836     gen_mov_reg_FCC1(t0, src, fcc_offset);
837     tcg_gen_xor_tl(dst, dst, t0);
838     tcg_gen_xori_tl(dst, dst, 0x1);
839 }
840 
/* Remaining inverted FP conditions: positive form, then flip bit 0. */

// 0 or 2: !FCC0
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !1: !(FCC0 & !FCC1)
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// 0 or 1: !FCC1
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !2: !(!FCC0 & FCC1)
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}

// !3: !(FCC0 & FCC1)
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
884 
/* End the TB with a two-way branch on r_cond: taken -> pc1, else pc2. */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    /* condition true: continue at pc1 */
    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    /* condition false: continue at pc2 */
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
897 
/* Conditional branch with the annul bit set: when taken, execute the
   delay slot and branch to pc1; when not taken, skip the delay slot. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    /* taken: delay slot at npc, then pc1 */
    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    /* not taken (annulled): skip the delay slot entirely */
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
912 
/* Conditional branch without the annul bit: the delay slot always runs;
   only the post-slot npc depends on the condition. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* npc is dynamic: advance pc, then select the new npc with a
               movcond on the branch condition. */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* npc is static: defer the choice via JUMP_PC / jump_pc[]. */
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}
938 
939 static void gen_generic_branch(DisasContext *dc)
940 {
941     TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
942     TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
943     TCGv zero = tcg_constant_tl(0);
944 
945     tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
946 }
947 
/* call this function before using the condition register as it may
   have been set for a jump */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Materialize the pending two-way npc into cpu_npc. */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
957 
/* Flush dc->npc into the cpu_npc global.  Static values are stored
   directly; JUMP_PC is resolved via movcond; other dynamic values are
   already in cpu_npc. */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* cpu_npc already holds the correct value. */
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
976 
977 static void update_psr(DisasContext *dc)
978 {
979     if (dc->cc_op != CC_OP_FLAGS) {
980         dc->cc_op = CC_OP_FLAGS;
981         gen_helper_compute_psr(tcg_env);
982     }
983 }
984 
/* Flush the translator's pc/npc view into the cpu globals. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
990 
/* Raise exception 'which' at the current pc/npc and end the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
997 
998 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
999 {
1000     DisasDelayException *e = g_new0(DisasDelayException, 1);
1001 
1002     e->next = dc->delay_excp_list;
1003     dc->delay_excp_list = e;
1004 
1005     e->lab = gen_new_label();
1006     e->excp = excp;
1007     e->pc = dc->pc;
1008     /* Caller must have used flush_cond before branch. */
1009     assert(e->npc != JUMP_PC);
1010     e->npc = dc->npc;
1011 
1012     return e->lab;
1013 }
1014 
/* Convenience wrapper for delay_exceptionv with a constant excp number. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
1019 
/* Emit a runtime alignment check: if any bit of 'mask' is set in 'addr',
   branch to a deferred TT_UNALIGNED raise. */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    /* flush_cond first: delay_exception requires npc != JUMP_PC. */
    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}
1031 
/* Advance pc to npc (e.g. before a register-indirect jump), handling all
   dynamic npc encodings. */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            /* Resolve the deferred branch, then copy npc into pc. */
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Both values static: no code needs to be emitted. */
        dc->pc = dc->npc;
    }
}
1053 
/* Sequential advance: pc <- npc, npc <- npc + 4 (order matters: pc must
   read the old npc first). */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1059 
1060 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1061                         DisasContext *dc)
1062 {
1063     static int subcc_cond[16] = {
1064         TCG_COND_NEVER,
1065         TCG_COND_EQ,
1066         TCG_COND_LE,
1067         TCG_COND_LT,
1068         TCG_COND_LEU,
1069         TCG_COND_LTU,
1070         -1, /* neg */
1071         -1, /* overflow */
1072         TCG_COND_ALWAYS,
1073         TCG_COND_NE,
1074         TCG_COND_GT,
1075         TCG_COND_GE,
1076         TCG_COND_GTU,
1077         TCG_COND_GEU,
1078         -1, /* pos */
1079         -1, /* no overflow */
1080     };
1081 
1082     static int logic_cond[16] = {
1083         TCG_COND_NEVER,
1084         TCG_COND_EQ,     /* eq:  Z */
1085         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1086         TCG_COND_LT,     /* lt:  N ^ V -> N */
1087         TCG_COND_EQ,     /* leu: C | Z -> Z */
1088         TCG_COND_NEVER,  /* ltu: C -> 0 */
1089         TCG_COND_LT,     /* neg: N */
1090         TCG_COND_NEVER,  /* vs:  V -> 0 */
1091         TCG_COND_ALWAYS,
1092         TCG_COND_NE,     /* ne:  !Z */
1093         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1094         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1095         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1096         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1097         TCG_COND_GE,     /* pos: !N */
1098         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1099     };
1100 
1101     TCGv_i32 r_src;
1102     TCGv r_dst;
1103 
1104 #ifdef TARGET_SPARC64
1105     if (xcc) {
1106         r_src = cpu_xcc;
1107     } else {
1108         r_src = cpu_psr;
1109     }
1110 #else
1111     r_src = cpu_psr;
1112 #endif
1113 
1114     switch (dc->cc_op) {
1115     case CC_OP_LOGIC:
1116         cmp->cond = logic_cond[cond];
1117     do_compare_dst_0:
1118         cmp->is_bool = false;
1119         cmp->c2 = tcg_constant_tl(0);
1120 #ifdef TARGET_SPARC64
1121         if (!xcc) {
1122             cmp->c1 = tcg_temp_new();
1123             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1124             break;
1125         }
1126 #endif
1127         cmp->c1 = cpu_cc_dst;
1128         break;
1129 
1130     case CC_OP_SUB:
1131         switch (cond) {
1132         case 6:  /* neg */
1133         case 14: /* pos */
1134             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1135             goto do_compare_dst_0;
1136 
1137         case 7: /* overflow */
1138         case 15: /* !overflow */
1139             goto do_dynamic;
1140 
1141         default:
1142             cmp->cond = subcc_cond[cond];
1143             cmp->is_bool = false;
1144 #ifdef TARGET_SPARC64
1145             if (!xcc) {
1146                 /* Note that sign-extension works for unsigned compares as
1147                    long as both operands are sign-extended.  */
1148                 cmp->c1 = tcg_temp_new();
1149                 cmp->c2 = tcg_temp_new();
1150                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1151                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1152                 break;
1153             }
1154 #endif
1155             cmp->c1 = cpu_cc_src;
1156             cmp->c2 = cpu_cc_src2;
1157             break;
1158         }
1159         break;
1160 
1161     default:
1162     do_dynamic:
1163         gen_helper_compute_psr(tcg_env);
1164         dc->cc_op = CC_OP_FLAGS;
1165         /* FALLTHRU */
1166 
1167     case CC_OP_FLAGS:
1168         /* We're going to generate a boolean result.  */
1169         cmp->cond = TCG_COND_NE;
1170         cmp->is_bool = true;
1171         cmp->c1 = r_dst = tcg_temp_new();
1172         cmp->c2 = tcg_constant_tl(0);
1173 
1174         switch (cond) {
1175         case 0x0:
1176             gen_op_eval_bn(r_dst);
1177             break;
1178         case 0x1:
1179             gen_op_eval_be(r_dst, r_src);
1180             break;
1181         case 0x2:
1182             gen_op_eval_ble(r_dst, r_src);
1183             break;
1184         case 0x3:
1185             gen_op_eval_bl(r_dst, r_src);
1186             break;
1187         case 0x4:
1188             gen_op_eval_bleu(r_dst, r_src);
1189             break;
1190         case 0x5:
1191             gen_op_eval_bcs(r_dst, r_src);
1192             break;
1193         case 0x6:
1194             gen_op_eval_bneg(r_dst, r_src);
1195             break;
1196         case 0x7:
1197             gen_op_eval_bvs(r_dst, r_src);
1198             break;
1199         case 0x8:
1200             gen_op_eval_ba(r_dst);
1201             break;
1202         case 0x9:
1203             gen_op_eval_bne(r_dst, r_src);
1204             break;
1205         case 0xa:
1206             gen_op_eval_bg(r_dst, r_src);
1207             break;
1208         case 0xb:
1209             gen_op_eval_bge(r_dst, r_src);
1210             break;
1211         case 0xc:
1212             gen_op_eval_bgu(r_dst, r_src);
1213             break;
1214         case 0xd:
1215             gen_op_eval_bcc(r_dst, r_src);
1216             break;
1217         case 0xe:
1218             gen_op_eval_bpos(r_dst, r_src);
1219             break;
1220         case 0xf:
1221             gen_op_eval_bvc(r_dst, r_src);
1222             break;
1223         }
1224         break;
1225     }
1226 }
1227 
/*
 * Translate a 4-bit floating-point condition code into a DisasCompare.
 * cc selects which %fsr fcc field to test: fcc0 sits at bit 10, and
 * fcc1/fcc2/fcc3 at bits 32/34/36; "offset" is the shift relative to
 * the fcc0 position expected by the gen_op_eval_fb* evaluators.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Evaluate the selected fcc condition into the boolean r_dst.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1306 
1307 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1308                      DisasContext *dc)
1309 {
1310     DisasCompare cmp;
1311     gen_compare(&cmp, cc, cond, dc);
1312 
1313     /* The interface is to return a boolean in r_dst.  */
1314     if (cmp.is_bool) {
1315         tcg_gen_mov_tl(r_dst, cmp.c1);
1316     } else {
1317         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1318     }
1319 }
1320 
1321 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1322 {
1323     DisasCompare cmp;
1324     gen_fcompare(&cmp, cc, cond);
1325 
1326     /* The interface is to return a boolean in r_dst.  */
1327     if (cmp.is_bool) {
1328         tcg_gen_mov_tl(r_dst, cmp.c1);
1329     } else {
1330         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1331     }
1332 }
1333 
1334 #ifdef TARGET_SPARC64
// Inverted logic
/* Indexed by the 3-bit rcond field; -1 entries are reserved encodings.
   Values are the *inverse* of the register-vs-zero test; consumers undo
   this with tcg_invert_cond.  */
static const int gen_tcg_cond_reg[8] = {
    -1,             /* 0: reserved */
    TCG_COND_NE,    /* 1: brz  */
    TCG_COND_GT,    /* 2: brlez */
    TCG_COND_GE,    /* 3: brlz */
    -1,             /* 4: reserved */
    TCG_COND_EQ,    /* 5: brnz */
    TCG_COND_LE,    /* 6: brgz */
    TCG_COND_LT,    /* 7: brgez */
};
1346 
1347 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1348 {
1349     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1350     cmp->is_bool = false;
1351     cmp->c1 = r_src;
1352     cmp->c2 = tcg_constant_tl(0);
1353 }
1354 
1355 static void gen_cond_reg(TCGv r_dst, int cond, TCGv r_src)
1356 {
1357     DisasCompare cmp;
1358     gen_compare_reg(&cmp, cond, r_src);
1359 
1360     /* The interface is to return a boolean in r_dst.  */
1361     tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1362 }
1363 #endif
1364 
/*
 * Translate a conditional branch on the integer condition codes.
 * cc selects icc vs xcc; the annul bit (a) controls whether the delay
 * slot is executed on the not-taken path.
 */
static void do_branch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* In 32-bit address-mask mode, branch targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annul bit set: skip the delay slot entirely. */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled taken branch jumps straight to the target. */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* npc may be a dynamic value here; mirror it into cpu_pc. */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: evaluate the condition into cpu_cond, then branch. */
        flush_cond(dc);
        gen_cond(cpu_cond, cc, cond, dc);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1402 
/*
 * Translate a conditional branch on the floating-point condition codes.
 * Same structure as do_branch, but the condition is evaluated from the
 * selected %fsr fcc field (cc) via gen_fcond.
 */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* In 32-bit address-mask mode, branch targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annul bit set: skip the delay slot entirely. */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled taken branch jumps straight to the target. */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            /* npc may be a dynamic value here; mirror it into cpu_pc. */
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: evaluate the fcc condition, then branch. */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1440 
1441 #ifdef TARGET_SPARC64
/*
 * Translate a branch-on-register-condition (BPr).  Unlike do_branch,
 * there are no unconditional encodings; the condition always tests
 * r_reg against zero.
 */
static void do_branch_reg(DisasContext *dc, int32_t offset, uint32_t insn,
                          TCGv r_reg)
{
    unsigned int cond = GET_FIELD_SP(insn, 25, 27), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    /* In 32-bit address-mask mode, branch targets wrap to 32 bits. */
    if (unlikely(AM_CHECK(dc))) {
        target &= 0xffffffffULL;
    }
    flush_cond(dc);
    gen_cond_reg(cpu_cond, cond, r_reg);
    if (a) {
        gen_branch_a(dc, target);
    } else {
        gen_branch_n(dc, target);
    }
}
1459 
1460 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1461 {
1462     switch (fccno) {
1463     case 0:
1464         gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
1465         break;
1466     case 1:
1467         gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1468         break;
1469     case 2:
1470         gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1471         break;
1472     case 3:
1473         gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1474         break;
1475     }
1476 }
1477 
1478 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1479 {
1480     switch (fccno) {
1481     case 0:
1482         gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
1483         break;
1484     case 1:
1485         gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1486         break;
1487     case 2:
1488         gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1489         break;
1490     case 3:
1491         gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1492         break;
1493     }
1494 }
1495 
1496 static void gen_op_fcmpq(int fccno)
1497 {
1498     switch (fccno) {
1499     case 0:
1500         gen_helper_fcmpq(cpu_fsr, tcg_env);
1501         break;
1502     case 1:
1503         gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
1504         break;
1505     case 2:
1506         gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
1507         break;
1508     case 3:
1509         gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
1510         break;
1511     }
1512 }
1513 
1514 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1515 {
1516     switch (fccno) {
1517     case 0:
1518         gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
1519         break;
1520     case 1:
1521         gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1522         break;
1523     case 2:
1524         gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1525         break;
1526     case 3:
1527         gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1528         break;
1529     }
1530 }
1531 
1532 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1533 {
1534     switch (fccno) {
1535     case 0:
1536         gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
1537         break;
1538     case 1:
1539         gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1540         break;
1541     case 2:
1542         gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1543         break;
1544     case 3:
1545         gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1546         break;
1547     }
1548 }
1549 
1550 static void gen_op_fcmpeq(int fccno)
1551 {
1552     switch (fccno) {
1553     case 0:
1554         gen_helper_fcmpeq(cpu_fsr, tcg_env);
1555         break;
1556     case 1:
1557         gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
1558         break;
1559     case 2:
1560         gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
1561         break;
1562     case 3:
1563         gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
1564         break;
1565     }
1566 }
1567 
1568 #else
1569 
/* Pre-v9 has a single fcc field, so fccno is ignored in all of these. */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

static void gen_op_fcmpq(int fccno)
{
    /* Quad operands are taken from the QT0/QT1 globals in env. */
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}

static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}

static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1599 #endif
1600 
/* Set the FSR ftt field to fsr_flags and raise an FP exception trap. */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Clear the old ftt field before or-ing in the new trap type. */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1607 
/* If the FPU is disabled, raise TT_NFPU_INSN and return 1; else 0.
   In user-only builds the FPU is always considered enabled. */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1618 
/* Clear the FSR ftt and current-exception (cexc) fields. */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1623 
/* frd = gen(env, frs): single-precision unary op, with IEEE trap check. */
static void gen_fop_FF(DisasContext *dc, int rd, int rs,
                              void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* frd = gen(frs): single-precision unary op, no exception check ("ne"). */
static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src);

    gen_store_fpr_F(dc, rd, dst);
}

/* frd = gen(env, frs1, frs2): single-precision binary op with trap check. */
static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1665 
1666 #ifdef TARGET_SPARC64
/* frd = gen(frs1, frs2): single-precision binary op, no exception check. */
static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1680 #endif
1681 
/* drd = gen(env, drs): double-precision unary op, with IEEE trap check. */
static void gen_fop_DD(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1695 
1696 #ifdef TARGET_SPARC64
/* drd = gen(drs): double-precision unary op, no exception check. */
static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1709 #endif
1710 
/* drd = gen(env, drs1, drs2): double-precision binary op with trap check. */
static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1725 
1726 #ifdef TARGET_SPARC64
/* drd = gen(drs1, drs2): double-precision binary op, no exception check. */
static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* drd = gen(%gsr, drs1, drs2): VIS op that also reads the GSR register. */
static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}

/* drd = gen(drd, drs1, drs2): ternary op reading the old destination. */
static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1769 #endif
1770 
/* qrd = gen(env): quad unary op; operands pass through the QT0/QT1
   globals in env, with IEEE trap check. */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1782 
1783 #ifdef TARGET_SPARC64
/* qrd = gen(env): quad unary op via QT0/QT1, no exception check. */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1794 #endif
1795 
/* qrd = gen(env): quad binary op via QT0/QT1, with IEEE trap check. */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

/* drd = gen(env, frs1, frs2): single operands, double result (e.g. fsmuld),
   with IEEE trap check. */
static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}

/* qrd = gen(env, drs1, drs2): double operands, quad result in QT0,
   with IEEE trap check. */
static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1839 
1840 #ifdef TARGET_SPARC64
/* drd = gen(env, frs): single operand, double result, with trap check. */
static void gen_fop_DF(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1855 #endif
1856 
/* drd = gen(env, frs): single operand, double result, no exception check. */
static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env, src);

    gen_store_fpr_D(dc, rd, dst);
}

/* frd = gen(env, drs): double operand, single result, with trap check. */
static void gen_fop_FD(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
{
    TCGv_i32 dst;
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env, src);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1885 
/* frd = gen(env): quad operand (via QT1), single result, with trap check. */
static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}

/* drd = gen(env): quad operand (via QT1), double result, with trap check. */
static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1913 
/* qrd = gen(env, frs): single operand, quad result in QT0, no check. */
static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}

/* qrd = gen(env, drs): double operand, quad result in QT0, no check. */
static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1939 
/* SWAP: atomically exchange src with memory at addr; old value -> dst. */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1946 
/* LDSTUB: atomically load the byte at addr into dst and store 0xff. */
static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
{
    TCGv m1 = tcg_constant_tl(0xff);
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
}
1953 
1954 /* asi moves */
1955 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Category of access for a decoded ASI; see get_asi().  */
typedef enum {
    GET_ASI_HELPER,   /* no direct implementation; go through a helper */
    GET_ASI_EXCP,     /* decoding raised an exception; emit no access */
    GET_ASI_DIRECT,   /* plain memory access with the chosen mem_idx */
    GET_ASI_DTWINX,   /* 128-bit twin load/store (ldda/stda variants) */
    GET_ASI_BLOCK,    /* block transfer ASIs (ASI_BLK_*) */
    GET_ASI_SHORT,    /* 8/16-bit FP load/store ASIs (ASI_FL8/FL16_*) */
    GET_ASI_BCOPY,    /* sparc32 ASI_M_BCOPY block copy */
    GET_ASI_BFILL,    /* sparc32 ASI_M_BFILL block fill */
} ASIType;

/* Result of ASI decode: category, raw asi number, softmmu index, and
   the (possibly adjusted) MemOp for the access.  */
typedef struct {
    ASIType type;
    int asi;
    int mem_idx;
    MemOp memop;
} DisasASI;
1973 
/*
 * Decode the ASI of a memory-access instruction: choose the access
 * category (ASIType), resolve the asi number, pick the softmmu index,
 * and adjust the MemOp (size and endianness) where the ASI demands it.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: choose the softmmu index from the address space
           the ASI selects.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: choose the access category.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2186 
/*
 * Generate code for a load-alternate (LDUBA/LDUWA/LDXA/...) instruction:
 * load into DST from ADDR through the ASI encoded in INSN, with the
 * access size/endianness requested by the opcode in MEMOP.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        /* The ASI maps to an ordinary MMU index: use an inline load. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Exotic ASI: fall back to the out-of-line helper. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to target width. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2221 
/*
 * Generate code for a store-alternate (STBA/STWA/STXA/...) instruction:
 * store SRC to ADDR through the ASI encoded in INSN, with the access
 * size/endianness requested by the opcode in MEMOP.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        /* Ordinary MMU index: use an inline store. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        /* Exotic ASI: fall back to the out-of-line helper. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen from target width. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2297 
2298 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2299                          TCGv addr, int insn)
2300 {
2301     DisasASI da = get_asi(dc, insn, MO_TEUL);
2302 
2303     switch (da.type) {
2304     case GET_ASI_EXCP:
2305         break;
2306     case GET_ASI_DIRECT:
2307         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2308         break;
2309     default:
2310         /* ??? Should be DAE_invalid_asi.  */
2311         gen_exception(dc, TT_DATA_ACCESS);
2312         break;
2313     }
2314 }
2315 
2316 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2317                         int insn, int rd)
2318 {
2319     DisasASI da = get_asi(dc, insn, MO_TEUL);
2320     TCGv oldv;
2321 
2322     switch (da.type) {
2323     case GET_ASI_EXCP:
2324         return;
2325     case GET_ASI_DIRECT:
2326         oldv = tcg_temp_new();
2327         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2328                                   da.mem_idx, da.memop | MO_ALIGN);
2329         gen_store_gpr(dc, rd, oldv);
2330         break;
2331     default:
2332         /* ??? Should be DAE_invalid_asi.  */
2333         gen_exception(dc, TT_DATA_ACCESS);
2334         break;
2335     }
2336 }
2337 
/*
 * Generate code for LDSTUBA: atomically load the byte at ADDR into DST
 * and store 0xff back, through the ASI encoded in INSN.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper-based sequence below is not atomic; restart
               this TB in exclusive (serialized) execution mode. */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            /* Non-atomic: load the old byte via the helper ... */
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            /* ... then store 0xff back through the same ASI. */
            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2373 #endif
2374 
2375 #ifdef TARGET_SPARC64
/*
 * Generate code for LDFA/LDDFA/LDQFA: load SIZE bytes (4, 8 or 16)
 * into fp register RD through the ASI encoded in INSN.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the first half into a temp and commit it only after
               the second load, so a fault on either access leaves the
               even register unmodified. */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Eight consecutive 8-byte loads fill fp regs rd..rd+7. */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* Same even-register-last ordering as the direct case. */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2484 
/*
 * Generate code for STFA/STDFA/STQFA: store SIZE bytes (4, 8 or 16)
 * from fp register RD to ADDR through the ASI encoded in INSN.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Eight consecutive 8-byte stores drain fp regs rd..rd+7. */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2567 
/*
 * Generate code for LDDA on sparc64: load a 64-bit doubleword into the
 * register pair rd/rd+1, or a 128-bit twin for the TWINX/QUAD ASIs.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    /* On sparc64, TCGv is 64 bits wide, so the gen_dest_gpr results
       can be used directly as TCGv_i64. */
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Skip the writebacks below as well. */
        return;

    case GET_ASI_DTWINX:
        /* 128-bit twin load: two 64-bit accesses, 16-byte aligned. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    /* Commit both halves to the register pair. */
    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2629 
/*
 * Generate code for STDA on sparc64: store the register pair rd/rd+1
 * as a 64-bit doubleword, or as a 128-bit twin for the TWINX ASIs.
 * HI is the already-loaded value of register rd.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;

    case GET_ASI_DTWINX:
        /* 128-bit twin store: two 64-bit accesses, 16-byte aligned. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2685 
2686 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2687                          int insn, int rd)
2688 {
2689     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2690     TCGv oldv;
2691 
2692     switch (da.type) {
2693     case GET_ASI_EXCP:
2694         return;
2695     case GET_ASI_DIRECT:
2696         oldv = tcg_temp_new();
2697         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2698                                   da.mem_idx, da.memop | MO_ALIGN);
2699         gen_store_gpr(dc, rd, oldv);
2700         break;
2701     default:
2702         /* ??? Should be DAE_invalid_asi.  */
2703         gen_exception(dc, TT_DATA_ACCESS);
2704         break;
2705     }
2706 }
2707 
2708 #elif !defined(CONFIG_USER_ONLY)
/*
 * Generate code for LDDA on sparc32: load one 64-bit doubleword and
 * split it into the 32-bit register pair rd/rd+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* Skip the writebacks below as well. */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Exotic ASI: fall back to the out-of-line helper. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* High half goes to the even register, low half to the odd one. */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2742 
/*
 * Generate code for STDA on sparc32: store the 32-bit register pair
 * rd/rd+1 as one 64-bit doubleword.  HI is the already-loaded value
 * of register rd.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Even register supplies the high half, odd register the low half. */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing here. */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Exotic ASI: fall back to the out-of-line helper. */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2788 #endif
2789 
2790 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2791 {
2792     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2793     return gen_load_gpr(dc, rs1);
2794 }
2795 
2796 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2797 {
2798     if (IS_IMM) { /* immediate */
2799         target_long simm = GET_FIELDs(insn, 19, 31);
2800         TCGv t = tcg_temp_new();
2801         tcg_gen_movi_tl(t, simm);
2802         return t;
2803     } else {      /* register */
2804         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2805         return gen_load_gpr(dc, rs2);
2806     }
2807 }
2808 
2809 #ifdef TARGET_SPARC64
2810 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2811 {
2812     TCGv_i32 c32, zero, dst, s1, s2;
2813 
2814     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2815        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2816        the later.  */
2817     c32 = tcg_temp_new_i32();
2818     if (cmp->is_bool) {
2819         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2820     } else {
2821         TCGv_i64 c64 = tcg_temp_new_i64();
2822         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2823         tcg_gen_extrl_i64_i32(c32, c64);
2824     }
2825 
2826     s1 = gen_load_fpr_F(dc, rs);
2827     s2 = gen_load_fpr_F(dc, rd);
2828     dst = gen_dest_fpr_F(dc);
2829     zero = tcg_constant_i32(0);
2830 
2831     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2832 
2833     gen_store_fpr_F(dc, rd, dst);
2834 }
2835 
2836 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2837 {
2838     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2839     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2840                         gen_load_fpr_D(dc, rs),
2841                         gen_load_fpr_D(dc, rd));
2842     gen_store_fpr_D(dc, rd, dst);
2843 }
2844 
2845 static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2846 {
2847     int qd = QFPREG(rd);
2848     int qs = QFPREG(rs);
2849 
2850     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
2851                         cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
2852     tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
2853                         cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);
2854 
2855     gen_update_fprs_dirty(dc, qd);
2856 }
2857 
2858 #ifndef CONFIG_USER_ONLY
2859 static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
2860 {
2861     TCGv_i32 r_tl = tcg_temp_new_i32();
2862 
2863     /* load env->tl into r_tl */
2864     tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));
2865 
2866     /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
2867     tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);
2868 
2869     /* calculate offset to current trap state from env->ts, reuse r_tl */
2870     tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
2871     tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));
2872 
2873     /* tsptr = env->ts[env->tl & MAXTL_MASK] */
2874     {
2875         TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
2876         tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
2877         tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
2878     }
2879 }
2880 #endif
2881 
/*
 * Generate code for the VIS EDGE8/16/32 instructions and their LEFT/CC
 * variants.  DST receives the edge mask computed from addresses S1 and
 * S2; WIDTH is the element width in bits (8, 16, or 32), CC requests a
 * subcc-style condition-code update, LEFT selects the left-edge form.
 * Note that S1 and S2 are clobbered (masked in place below).
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* The CC variants set icc/xcc exactly as subcc s1, s2 would. */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1/lo2 = table lookup of the low address bits of s1/s2. */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(lo1, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Align both addresses down for the equality comparison; honour
       32-bit address masking when PSTATE.AM applies. */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
    tcg_gen_and_tl(lo2, lo2, lo1);
    tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
}
2968 
2969 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2970 {
2971     TCGv tmp = tcg_temp_new();
2972 
2973     tcg_gen_add_tl(tmp, s1, s2);
2974     tcg_gen_andi_tl(dst, tmp, -8);
2975     if (left) {
2976         tcg_gen_neg_tl(tmp, tmp);
2977     }
2978     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2979 }
2980 
2981 static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
2982 {
2983     TCGv t1, t2, shift;
2984 
2985     t1 = tcg_temp_new();
2986     t2 = tcg_temp_new();
2987     shift = tcg_temp_new();
2988 
2989     tcg_gen_andi_tl(shift, gsr, 7);
2990     tcg_gen_shli_tl(shift, shift, 3);
2991     tcg_gen_shl_tl(t1, s1, shift);
2992 
2993     /* A shift of 64 does not produce 0 in TCG.  Divide this into a
2994        shift of (up to 63) followed by a constant shift of 1.  */
2995     tcg_gen_xori_tl(shift, shift, 63);
2996     tcg_gen_shr_tl(t2, s2, shift);
2997     tcg_gen_shri_tl(t2, t2, 1);
2998 
2999     tcg_gen_or_tl(dst, t1, t2);
3000 }
3001 #endif
3002 
3003 /* Include the auto-generated decoder.  */
3004 #include "decode-insns.c.inc"
3005 
3006 #define TRANS(NAME, AVAIL, FUNC, ...) \
3007     static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
3008     { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }
3009 
3010 #define avail_ALL(C)      true
3011 #ifdef TARGET_SPARC64
3012 # define avail_32(C)      false
3013 # define avail_64(C)      true
3014 #else
3015 # define avail_32(C)      true
3016 # define avail_64(C)      false
3017 #endif
3018 
3019 /* Default case for non jump instructions. */
3020 static bool advance_pc(DisasContext *dc)
3021 {
3022     if (dc->npc & 3) {
3023         switch (dc->npc) {
3024         case DYNAMIC_PC:
3025         case DYNAMIC_PC_LOOKUP:
3026             dc->pc = dc->npc;
3027             gen_op_next_insn();
3028             break;
3029         case JUMP_PC:
3030             /* we can do a static jump */
3031             gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
3032             dc->base.is_jmp = DISAS_NORETURN;
3033             break;
3034         default:
3035             g_assert_not_reached();
3036         }
3037     } else {
3038         dc->pc = dc->npc;
3039         dc->npc = dc->npc + 4;
3040     }
3041     return true;
3042 }
3043 
/* Bail out when the CPU model lacks the named integer-unit or FPU
   feature.  NOTE: these expand to a bare goto, so they are only usable
   inside a function that defines the illegal_insn/nfpu_insn labels
   (the legacy decoder below).  */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3050 
3051 /* before an instruction, dc->pc must be static */
3052 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3053 {
3054     unsigned int opc, rs1, rs2, rd;
3055     TCGv cpu_src1, cpu_src2;
3056     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3057     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3058     target_long simm;
3059 
3060     opc = GET_FIELD(insn, 0, 1);
3061     rd = GET_FIELD(insn, 2, 6);
3062 
3063     switch (opc) {
3064     case 0:                     /* branches/sethi */
3065         {
3066             unsigned int xop = GET_FIELD(insn, 7, 9);
3067             int32_t target;
3068             switch (xop) {
3069 #ifdef TARGET_SPARC64
3070             case 0x1:           /* V9 BPcc */
3071                 {
3072                     int cc;
3073 
3074                     target = GET_FIELD_SP(insn, 0, 18);
3075                     target = sign_extend(target, 19);
3076                     target <<= 2;
3077                     cc = GET_FIELD_SP(insn, 20, 21);
3078                     if (cc == 0)
3079                         do_branch(dc, target, insn, 0);
3080                     else if (cc == 2)
3081                         do_branch(dc, target, insn, 1);
3082                     else
3083                         goto illegal_insn;
3084                     goto jmp_insn;
3085                 }
3086             case 0x3:           /* V9 BPr */
3087                 {
3088                     target = GET_FIELD_SP(insn, 0, 13) |
3089                         (GET_FIELD_SP(insn, 20, 21) << 14);
3090                     target = sign_extend(target, 16);
3091                     target <<= 2;
3092                     cpu_src1 = get_src1(dc, insn);
3093                     do_branch_reg(dc, target, insn, cpu_src1);
3094                     goto jmp_insn;
3095                 }
3096             case 0x5:           /* V9 FBPcc */
3097                 {
3098                     int cc = GET_FIELD_SP(insn, 20, 21);
3099                     if (gen_trap_ifnofpu(dc)) {
3100                         goto jmp_insn;
3101                     }
3102                     target = GET_FIELD_SP(insn, 0, 18);
3103                     target = sign_extend(target, 19);
3104                     target <<= 2;
3105                     do_fbranch(dc, target, insn, cc);
3106                     goto jmp_insn;
3107                 }
3108 #else
3109             case 0x7:           /* CBN+x */
3110                 {
3111                     goto ncp_insn;
3112                 }
3113 #endif
3114             case 0x2:           /* BN+x */
3115                 {
3116                     target = GET_FIELD(insn, 10, 31);
3117                     target = sign_extend(target, 22);
3118                     target <<= 2;
3119                     do_branch(dc, target, insn, 0);
3120                     goto jmp_insn;
3121                 }
3122             case 0x6:           /* FBN+x */
3123                 {
3124                     if (gen_trap_ifnofpu(dc)) {
3125                         goto jmp_insn;
3126                     }
3127                     target = GET_FIELD(insn, 10, 31);
3128                     target = sign_extend(target, 22);
3129                     target <<= 2;
3130                     do_fbranch(dc, target, insn, 0);
3131                     goto jmp_insn;
3132                 }
3133             case 0x4:           /* SETHI */
3134                 /* Special-case %g0 because that's the canonical nop.  */
3135                 if (rd) {
3136                     uint32_t value = GET_FIELD(insn, 10, 31);
3137                     TCGv t = gen_dest_gpr(dc, rd);
3138                     tcg_gen_movi_tl(t, value << 10);
3139                     gen_store_gpr(dc, rd, t);
3140                 }
3141                 break;
3142             case 0x0:           /* UNIMPL */
3143             default:
3144                 goto illegal_insn;
3145             }
3146             break;
3147         }
3148         break;
3149     case 1:                     /*CALL*/
3150         {
3151             target_long target = GET_FIELDs(insn, 2, 31) << 2;
3152             TCGv o7 = gen_dest_gpr(dc, 15);
3153 
3154             tcg_gen_movi_tl(o7, dc->pc);
3155             gen_store_gpr(dc, 15, o7);
3156             target += dc->pc;
3157             gen_mov_pc_npc(dc);
3158 #ifdef TARGET_SPARC64
3159             if (unlikely(AM_CHECK(dc))) {
3160                 target &= 0xffffffffULL;
3161             }
3162 #endif
3163             dc->npc = target;
3164         }
3165         goto jmp_insn;
3166     case 2:                     /* FPU & Logical Operations */
3167         {
3168             unsigned int xop = GET_FIELD(insn, 7, 12);
3169             TCGv cpu_dst = tcg_temp_new();
3170             TCGv cpu_tmp0;
3171 
3172             if (xop == 0x3a) {  /* generate trap */
3173                 int cond = GET_FIELD(insn, 3, 6);
3174                 TCGv_i32 trap;
3175                 TCGLabel *l1 = NULL;
3176                 int mask;
3177 
3178                 if (cond == 0) {
3179                     /* Trap never.  */
3180                     break;
3181                 }
3182 
3183                 save_state(dc);
3184 
3185                 if (cond != 8) {
3186                     /* Conditional trap.  */
3187                     DisasCompare cmp;
3188 #ifdef TARGET_SPARC64
3189                     /* V9 icc/xcc */
3190                     int cc = GET_FIELD_SP(insn, 11, 12);
3191                     if (cc == 0) {
3192                         gen_compare(&cmp, 0, cond, dc);
3193                     } else if (cc == 2) {
3194                         gen_compare(&cmp, 1, cond, dc);
3195                     } else {
3196                         goto illegal_insn;
3197                     }
3198 #else
3199                     gen_compare(&cmp, 0, cond, dc);
3200 #endif
3201                     l1 = gen_new_label();
3202                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3203                                       cmp.c1, cmp.c2, l1);
3204                 }
3205 
3206                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3207                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3208 
3209                 /* Don't use the normal temporaries, as they may well have
3210                    gone out of scope with the branch above.  While we're
3211                    doing that we might as well pre-truncate to 32-bit.  */
3212                 trap = tcg_temp_new_i32();
3213 
3214                 rs1 = GET_FIELD_SP(insn, 14, 18);
3215                 if (IS_IMM) {
3216                     rs2 = GET_FIELD_SP(insn, 0, 7);
3217                     if (rs1 == 0) {
3218                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3219                         /* Signal that the trap value is fully constant.  */
3220                         mask = 0;
3221                     } else {
3222                         TCGv t1 = gen_load_gpr(dc, rs1);
3223                         tcg_gen_trunc_tl_i32(trap, t1);
3224                         tcg_gen_addi_i32(trap, trap, rs2);
3225                     }
3226                 } else {
3227                     TCGv t1, t2;
3228                     rs2 = GET_FIELD_SP(insn, 0, 4);
3229                     t1 = gen_load_gpr(dc, rs1);
3230                     t2 = gen_load_gpr(dc, rs2);
3231                     tcg_gen_add_tl(t1, t1, t2);
3232                     tcg_gen_trunc_tl_i32(trap, t1);
3233                 }
3234                 if (mask != 0) {
3235                     tcg_gen_andi_i32(trap, trap, mask);
3236                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3237                 }
3238 
3239                 gen_helper_raise_exception(tcg_env, trap);
3240 
3241                 if (cond == 8) {
3242                     /* An unconditional trap ends the TB.  */
3243                     dc->base.is_jmp = DISAS_NORETURN;
3244                     goto jmp_insn;
3245                 } else {
3246                     /* A conditional trap falls through to the next insn.  */
3247                     gen_set_label(l1);
3248                     break;
3249                 }
3250             } else if (xop == 0x28) {
3251                 rs1 = GET_FIELD(insn, 13, 17);
3252                 switch(rs1) {
3253                 case 0: /* rdy */
3254 #ifndef TARGET_SPARC64
3255                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3256                                        manual, rdy on the microSPARC
3257                                        II */
3258                 case 0x0f:          /* stbar in the SPARCv8 manual,
3259                                        rdy on the microSPARC II */
3260                 case 0x10 ... 0x1f: /* implementation-dependent in the
3261                                        SPARCv8 manual, rdy on the
3262                                        microSPARC II */
3263                     /* Read Asr17 */
3264                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3265                         TCGv t = gen_dest_gpr(dc, rd);
3266                         /* Read Asr17 for a Leon3 monoprocessor */
3267                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3268                         gen_store_gpr(dc, rd, t);
3269                         break;
3270                     }
3271 #endif
3272                     gen_store_gpr(dc, rd, cpu_y);
3273                     break;
3274 #ifdef TARGET_SPARC64
3275                 case 0x2: /* V9 rdccr */
3276                     update_psr(dc);
3277                     gen_helper_rdccr(cpu_dst, tcg_env);
3278                     gen_store_gpr(dc, rd, cpu_dst);
3279                     break;
3280                 case 0x3: /* V9 rdasi */
3281                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3282                     gen_store_gpr(dc, rd, cpu_dst);
3283                     break;
3284                 case 0x4: /* V9 rdtick */
3285                     {
3286                         TCGv_ptr r_tickptr;
3287                         TCGv_i32 r_const;
3288 
3289                         r_tickptr = tcg_temp_new_ptr();
3290                         r_const = tcg_constant_i32(dc->mem_idx);
3291                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3292                                        offsetof(CPUSPARCState, tick));
3293                         if (translator_io_start(&dc->base)) {
3294                             dc->base.is_jmp = DISAS_EXIT;
3295                         }
3296                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3297                                                   r_const);
3298                         gen_store_gpr(dc, rd, cpu_dst);
3299                     }
3300                     break;
3301                 case 0x5: /* V9 rdpc */
3302                     {
3303                         TCGv t = gen_dest_gpr(dc, rd);
3304                         if (unlikely(AM_CHECK(dc))) {
3305                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3306                         } else {
3307                             tcg_gen_movi_tl(t, dc->pc);
3308                         }
3309                         gen_store_gpr(dc, rd, t);
3310                     }
3311                     break;
3312                 case 0x6: /* V9 rdfprs */
3313                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3314                     gen_store_gpr(dc, rd, cpu_dst);
3315                     break;
3316                 case 0xf: /* V9 membar */
3317                     break; /* no effect */
3318                 case 0x13: /* Graphics Status */
3319                     if (gen_trap_ifnofpu(dc)) {
3320                         goto jmp_insn;
3321                     }
3322                     gen_store_gpr(dc, rd, cpu_gsr);
3323                     break;
3324                 case 0x16: /* Softint */
3325                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3326                                      offsetof(CPUSPARCState, softint));
3327                     gen_store_gpr(dc, rd, cpu_dst);
3328                     break;
3329                 case 0x17: /* Tick compare */
3330                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3331                     break;
3332                 case 0x18: /* System tick */
3333                     {
3334                         TCGv_ptr r_tickptr;
3335                         TCGv_i32 r_const;
3336 
3337                         r_tickptr = tcg_temp_new_ptr();
3338                         r_const = tcg_constant_i32(dc->mem_idx);
3339                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3340                                        offsetof(CPUSPARCState, stick));
3341                         if (translator_io_start(&dc->base)) {
3342                             dc->base.is_jmp = DISAS_EXIT;
3343                         }
3344                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3345                                                   r_const);
3346                         gen_store_gpr(dc, rd, cpu_dst);
3347                     }
3348                     break;
3349                 case 0x19: /* System tick compare */
3350                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3351                     break;
3352                 case 0x1a: /* UltraSPARC-T1 Strand status */
3353                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3354                      * this ASR as impl. dep
3355                      */
3356                     CHECK_IU_FEATURE(dc, HYPV);
3357                     {
3358                         TCGv t = gen_dest_gpr(dc, rd);
3359                         tcg_gen_movi_tl(t, 1UL);
3360                         gen_store_gpr(dc, rd, t);
3361                     }
3362                     break;
3363                 case 0x10: /* Performance Control */
3364                 case 0x11: /* Performance Instrumentation Counter */
3365                 case 0x12: /* Dispatch Control */
3366                 case 0x14: /* Softint set, WO */
3367                 case 0x15: /* Softint clear, WO */
3368 #endif
3369                 default:
3370                     goto illegal_insn;
3371                 }
3372 #if !defined(CONFIG_USER_ONLY)
3373             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3374 #ifndef TARGET_SPARC64
3375                 if (!supervisor(dc)) {
3376                     goto priv_insn;
3377                 }
3378                 update_psr(dc);
3379                 gen_helper_rdpsr(cpu_dst, tcg_env);
3380 #else
3381                 CHECK_IU_FEATURE(dc, HYPV);
3382                 if (!hypervisor(dc))
3383                     goto priv_insn;
3384                 rs1 = GET_FIELD(insn, 13, 17);
3385                 switch (rs1) {
3386                 case 0: // hpstate
3387                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3388                                    offsetof(CPUSPARCState, hpstate));
3389                     break;
3390                 case 1: // htstate
3391                     // gen_op_rdhtstate();
3392                     break;
3393                 case 3: // hintp
3394                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3395                     break;
3396                 case 5: // htba
3397                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3398                     break;
3399                 case 6: // hver
3400                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3401                     break;
3402                 case 31: // hstick_cmpr
3403                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3404                     break;
3405                 default:
3406                     goto illegal_insn;
3407                 }
3408 #endif
3409                 gen_store_gpr(dc, rd, cpu_dst);
3410                 break;
3411             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3412                 if (!supervisor(dc)) {
3413                     goto priv_insn;
3414                 }
3415                 cpu_tmp0 = tcg_temp_new();
3416 #ifdef TARGET_SPARC64
3417                 rs1 = GET_FIELD(insn, 13, 17);
3418                 switch (rs1) {
3419                 case 0: // tpc
3420                     {
3421                         TCGv_ptr r_tsptr;
3422 
3423                         r_tsptr = tcg_temp_new_ptr();
3424                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3425                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3426                                       offsetof(trap_state, tpc));
3427                     }
3428                     break;
3429                 case 1: // tnpc
3430                     {
3431                         TCGv_ptr r_tsptr;
3432 
3433                         r_tsptr = tcg_temp_new_ptr();
3434                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3435                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3436                                       offsetof(trap_state, tnpc));
3437                     }
3438                     break;
3439                 case 2: // tstate
3440                     {
3441                         TCGv_ptr r_tsptr;
3442 
3443                         r_tsptr = tcg_temp_new_ptr();
3444                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3445                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3446                                       offsetof(trap_state, tstate));
3447                     }
3448                     break;
3449                 case 3: // tt
3450                     {
3451                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3452 
3453                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3454                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3455                                          offsetof(trap_state, tt));
3456                     }
3457                     break;
3458                 case 4: // tick
3459                     {
3460                         TCGv_ptr r_tickptr;
3461                         TCGv_i32 r_const;
3462 
3463                         r_tickptr = tcg_temp_new_ptr();
3464                         r_const = tcg_constant_i32(dc->mem_idx);
3465                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3466                                        offsetof(CPUSPARCState, tick));
3467                         if (translator_io_start(&dc->base)) {
3468                             dc->base.is_jmp = DISAS_EXIT;
3469                         }
3470                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3471                                                   r_tickptr, r_const);
3472                     }
3473                     break;
3474                 case 5: // tba
3475                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3476                     break;
3477                 case 6: // pstate
3478                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3479                                      offsetof(CPUSPARCState, pstate));
3480                     break;
3481                 case 7: // tl
3482                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3483                                      offsetof(CPUSPARCState, tl));
3484                     break;
3485                 case 8: // pil
3486                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3487                                      offsetof(CPUSPARCState, psrpil));
3488                     break;
3489                 case 9: // cwp
3490                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3491                     break;
3492                 case 10: // cansave
3493                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3494                                      offsetof(CPUSPARCState, cansave));
3495                     break;
3496                 case 11: // canrestore
3497                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3498                                      offsetof(CPUSPARCState, canrestore));
3499                     break;
3500                 case 12: // cleanwin
3501                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3502                                      offsetof(CPUSPARCState, cleanwin));
3503                     break;
3504                 case 13: // otherwin
3505                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3506                                      offsetof(CPUSPARCState, otherwin));
3507                     break;
3508                 case 14: // wstate
3509                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3510                                      offsetof(CPUSPARCState, wstate));
3511                     break;
3512                 case 16: // UA2005 gl
3513                     CHECK_IU_FEATURE(dc, GL);
3514                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3515                                      offsetof(CPUSPARCState, gl));
3516                     break;
3517                 case 26: // UA2005 strand status
3518                     CHECK_IU_FEATURE(dc, HYPV);
3519                     if (!hypervisor(dc))
3520                         goto priv_insn;
3521                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3522                     break;
3523                 case 31: // ver
3524                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3525                     break;
3526                 case 15: // fq
3527                 default:
3528                     goto illegal_insn;
3529                 }
3530 #else
3531                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3532 #endif
3533                 gen_store_gpr(dc, rd, cpu_tmp0);
3534                 break;
3535 #endif
3536 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3537             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3538 #ifdef TARGET_SPARC64
3539                 gen_helper_flushw(tcg_env);
3540 #else
3541                 if (!supervisor(dc))
3542                     goto priv_insn;
3543                 gen_store_gpr(dc, rd, cpu_tbr);
3544 #endif
3545                 break;
3546 #endif
3547             } else if (xop == 0x34) {   /* FPU Operations */
3548                 if (gen_trap_ifnofpu(dc)) {
3549                     goto jmp_insn;
3550                 }
3551                 gen_op_clear_ieee_excp_and_FTT();
3552                 rs1 = GET_FIELD(insn, 13, 17);
3553                 rs2 = GET_FIELD(insn, 27, 31);
3554                 xop = GET_FIELD(insn, 18, 26);
3555 
3556                 switch (xop) {
3557                 case 0x1: /* fmovs */
3558                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3559                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3560                     break;
3561                 case 0x5: /* fnegs */
3562                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3563                     break;
3564                 case 0x9: /* fabss */
3565                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3566                     break;
3567                 case 0x29: /* fsqrts */
3568                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3569                     break;
3570                 case 0x2a: /* fsqrtd */
3571                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3572                     break;
3573                 case 0x2b: /* fsqrtq */
3574                     CHECK_FPU_FEATURE(dc, FLOAT128);
3575                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3576                     break;
3577                 case 0x41: /* fadds */
3578                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3579                     break;
3580                 case 0x42: /* faddd */
3581                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3582                     break;
3583                 case 0x43: /* faddq */
3584                     CHECK_FPU_FEATURE(dc, FLOAT128);
3585                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3586                     break;
3587                 case 0x45: /* fsubs */
3588                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3589                     break;
3590                 case 0x46: /* fsubd */
3591                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3592                     break;
3593                 case 0x47: /* fsubq */
3594                     CHECK_FPU_FEATURE(dc, FLOAT128);
3595                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3596                     break;
3597                 case 0x49: /* fmuls */
3598                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3599                     break;
3600                 case 0x4a: /* fmuld */
3601                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3602                     break;
3603                 case 0x4b: /* fmulq */
3604                     CHECK_FPU_FEATURE(dc, FLOAT128);
3605                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3606                     break;
3607                 case 0x4d: /* fdivs */
3608                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3609                     break;
3610                 case 0x4e: /* fdivd */
3611                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3612                     break;
3613                 case 0x4f: /* fdivq */
3614                     CHECK_FPU_FEATURE(dc, FLOAT128);
3615                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3616                     break;
3617                 case 0x69: /* fsmuld */
3618                     CHECK_FPU_FEATURE(dc, FSMULD);
3619                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3620                     break;
3621                 case 0x6e: /* fdmulq */
3622                     CHECK_FPU_FEATURE(dc, FLOAT128);
3623                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3624                     break;
3625                 case 0xc4: /* fitos */
3626                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3627                     break;
3628                 case 0xc6: /* fdtos */
3629                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3630                     break;
3631                 case 0xc7: /* fqtos */
3632                     CHECK_FPU_FEATURE(dc, FLOAT128);
3633                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3634                     break;
3635                 case 0xc8: /* fitod */
3636                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3637                     break;
3638                 case 0xc9: /* fstod */
3639                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3640                     break;
3641                 case 0xcb: /* fqtod */
3642                     CHECK_FPU_FEATURE(dc, FLOAT128);
3643                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3644                     break;
3645                 case 0xcc: /* fitoq */
3646                     CHECK_FPU_FEATURE(dc, FLOAT128);
3647                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3648                     break;
3649                 case 0xcd: /* fstoq */
3650                     CHECK_FPU_FEATURE(dc, FLOAT128);
3651                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3652                     break;
3653                 case 0xce: /* fdtoq */
3654                     CHECK_FPU_FEATURE(dc, FLOAT128);
3655                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3656                     break;
3657                 case 0xd1: /* fstoi */
3658                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3659                     break;
3660                 case 0xd2: /* fdtoi */
3661                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3662                     break;
3663                 case 0xd3: /* fqtoi */
3664                     CHECK_FPU_FEATURE(dc, FLOAT128);
3665                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3666                     break;
3667 #ifdef TARGET_SPARC64
3668                 case 0x2: /* V9 fmovd */
3669                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3670                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3671                     break;
3672                 case 0x3: /* V9 fmovq */
3673                     CHECK_FPU_FEATURE(dc, FLOAT128);
3674                     gen_move_Q(dc, rd, rs2);
3675                     break;
3676                 case 0x6: /* V9 fnegd */
3677                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3678                     break;
3679                 case 0x7: /* V9 fnegq */
3680                     CHECK_FPU_FEATURE(dc, FLOAT128);
3681                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3682                     break;
3683                 case 0xa: /* V9 fabsd */
3684                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3685                     break;
3686                 case 0xb: /* V9 fabsq */
3687                     CHECK_FPU_FEATURE(dc, FLOAT128);
3688                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3689                     break;
3690                 case 0x81: /* V9 fstox */
3691                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3692                     break;
3693                 case 0x82: /* V9 fdtox */
3694                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3695                     break;
3696                 case 0x83: /* V9 fqtox */
3697                     CHECK_FPU_FEATURE(dc, FLOAT128);
3698                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3699                     break;
3700                 case 0x84: /* V9 fxtos */
3701                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3702                     break;
3703                 case 0x88: /* V9 fxtod */
3704                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3705                     break;
3706                 case 0x8c: /* V9 fxtoq */
3707                     CHECK_FPU_FEATURE(dc, FLOAT128);
3708                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3709                     break;
3710 #endif
3711                 default:
3712                     goto illegal_insn;
3713                 }
3714             } else if (xop == 0x35) {   /* FPU Operations */
3715 #ifdef TARGET_SPARC64
3716                 int cond;
3717 #endif
3718                 if (gen_trap_ifnofpu(dc)) {
3719                     goto jmp_insn;
3720                 }
3721                 gen_op_clear_ieee_excp_and_FTT();
3722                 rs1 = GET_FIELD(insn, 13, 17);
3723                 rs2 = GET_FIELD(insn, 27, 31);
3724                 xop = GET_FIELD(insn, 18, 26);
3725 
3726 #ifdef TARGET_SPARC64
3727 #define FMOVR(sz)                                                  \
3728                 do {                                               \
3729                     DisasCompare cmp;                              \
3730                     cond = GET_FIELD_SP(insn, 10, 12);             \
3731                     cpu_src1 = get_src1(dc, insn);                 \
3732                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3733                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3734                 } while (0)
3735 
3736                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3737                     FMOVR(s);
3738                     break;
3739                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3740                     FMOVR(d);
3741                     break;
3742                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3743                     CHECK_FPU_FEATURE(dc, FLOAT128);
3744                     FMOVR(q);
3745                     break;
3746                 }
3747 #undef FMOVR
3748 #endif
3749                 switch (xop) {
3750 #ifdef TARGET_SPARC64
3751 #define FMOVCC(fcc, sz)                                                 \
3752                     do {                                                \
3753                         DisasCompare cmp;                               \
3754                         cond = GET_FIELD_SP(insn, 14, 17);              \
3755                         gen_fcompare(&cmp, fcc, cond);                  \
3756                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3757                     } while (0)
3758 
3759                     case 0x001: /* V9 fmovscc %fcc0 */
3760                         FMOVCC(0, s);
3761                         break;
3762                     case 0x002: /* V9 fmovdcc %fcc0 */
3763                         FMOVCC(0, d);
3764                         break;
3765                     case 0x003: /* V9 fmovqcc %fcc0 */
3766                         CHECK_FPU_FEATURE(dc, FLOAT128);
3767                         FMOVCC(0, q);
3768                         break;
3769                     case 0x041: /* V9 fmovscc %fcc1 */
3770                         FMOVCC(1, s);
3771                         break;
3772                     case 0x042: /* V9 fmovdcc %fcc1 */
3773                         FMOVCC(1, d);
3774                         break;
3775                     case 0x043: /* V9 fmovqcc %fcc1 */
3776                         CHECK_FPU_FEATURE(dc, FLOAT128);
3777                         FMOVCC(1, q);
3778                         break;
3779                     case 0x081: /* V9 fmovscc %fcc2 */
3780                         FMOVCC(2, s);
3781                         break;
3782                     case 0x082: /* V9 fmovdcc %fcc2 */
3783                         FMOVCC(2, d);
3784                         break;
3785                     case 0x083: /* V9 fmovqcc %fcc2 */
3786                         CHECK_FPU_FEATURE(dc, FLOAT128);
3787                         FMOVCC(2, q);
3788                         break;
3789                     case 0x0c1: /* V9 fmovscc %fcc3 */
3790                         FMOVCC(3, s);
3791                         break;
3792                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3793                         FMOVCC(3, d);
3794                         break;
3795                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3796                         CHECK_FPU_FEATURE(dc, FLOAT128);
3797                         FMOVCC(3, q);
3798                         break;
3799 #undef FMOVCC
3800 #define FMOVCC(xcc, sz)                                                 \
3801                     do {                                                \
3802                         DisasCompare cmp;                               \
3803                         cond = GET_FIELD_SP(insn, 14, 17);              \
3804                         gen_compare(&cmp, xcc, cond, dc);               \
3805                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3806                     } while (0)
3807 
3808                     case 0x101: /* V9 fmovscc %icc */
3809                         FMOVCC(0, s);
3810                         break;
3811                     case 0x102: /* V9 fmovdcc %icc */
3812                         FMOVCC(0, d);
3813                         break;
3814                     case 0x103: /* V9 fmovqcc %icc */
3815                         CHECK_FPU_FEATURE(dc, FLOAT128);
3816                         FMOVCC(0, q);
3817                         break;
3818                     case 0x181: /* V9 fmovscc %xcc */
3819                         FMOVCC(1, s);
3820                         break;
3821                     case 0x182: /* V9 fmovdcc %xcc */
3822                         FMOVCC(1, d);
3823                         break;
3824                     case 0x183: /* V9 fmovqcc %xcc */
3825                         CHECK_FPU_FEATURE(dc, FLOAT128);
3826                         FMOVCC(1, q);
3827                         break;
3828 #undef FMOVCC
3829 #endif
3830                     case 0x51: /* fcmps, V9 %fcc */
3831                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3832                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3833                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3834                         break;
3835                     case 0x52: /* fcmpd, V9 %fcc */
3836                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3837                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3838                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3839                         break;
3840                     case 0x53: /* fcmpq, V9 %fcc */
3841                         CHECK_FPU_FEATURE(dc, FLOAT128);
3842                         gen_op_load_fpr_QT0(QFPREG(rs1));
3843                         gen_op_load_fpr_QT1(QFPREG(rs2));
3844                         gen_op_fcmpq(rd & 3);
3845                         break;
3846                     case 0x55: /* fcmpes, V9 %fcc */
3847                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3848                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3849                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3850                         break;
3851                     case 0x56: /* fcmped, V9 %fcc */
3852                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3853                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3854                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3855                         break;
3856                     case 0x57: /* fcmpeq, V9 %fcc */
3857                         CHECK_FPU_FEATURE(dc, FLOAT128);
3858                         gen_op_load_fpr_QT0(QFPREG(rs1));
3859                         gen_op_load_fpr_QT1(QFPREG(rs2));
3860                         gen_op_fcmpeq(rd & 3);
3861                         break;
3862                     default:
3863                         goto illegal_insn;
3864                 }
3865             } else if (xop == 0x2) {
3866                 TCGv dst = gen_dest_gpr(dc, rd);
3867                 rs1 = GET_FIELD(insn, 13, 17);
3868                 if (rs1 == 0) {
3869                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3870                     if (IS_IMM) {       /* immediate */
3871                         simm = GET_FIELDs(insn, 19, 31);
3872                         tcg_gen_movi_tl(dst, simm);
3873                         gen_store_gpr(dc, rd, dst);
3874                     } else {            /* register */
3875                         rs2 = GET_FIELD(insn, 27, 31);
3876                         if (rs2 == 0) {
3877                             tcg_gen_movi_tl(dst, 0);
3878                             gen_store_gpr(dc, rd, dst);
3879                         } else {
3880                             cpu_src2 = gen_load_gpr(dc, rs2);
3881                             gen_store_gpr(dc, rd, cpu_src2);
3882                         }
3883                     }
3884                 } else {
3885                     cpu_src1 = get_src1(dc, insn);
3886                     if (IS_IMM) {       /* immediate */
3887                         simm = GET_FIELDs(insn, 19, 31);
3888                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3889                         gen_store_gpr(dc, rd, dst);
3890                     } else {            /* register */
3891                         rs2 = GET_FIELD(insn, 27, 31);
3892                         if (rs2 == 0) {
3893                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3894                             gen_store_gpr(dc, rd, cpu_src1);
3895                         } else {
3896                             cpu_src2 = gen_load_gpr(dc, rs2);
3897                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3898                             gen_store_gpr(dc, rd, dst);
3899                         }
3900                     }
3901                 }
3902 #ifdef TARGET_SPARC64
3903             } else if (xop == 0x25) { /* sll, V9 sllx */
3904                 cpu_src1 = get_src1(dc, insn);
3905                 if (IS_IMM) {   /* immediate */
3906                     simm = GET_FIELDs(insn, 20, 31);
3907                     if (insn & (1 << 12)) {
3908                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3909                     } else {
3910                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3911                     }
3912                 } else {                /* register */
3913                     rs2 = GET_FIELD(insn, 27, 31);
3914                     cpu_src2 = gen_load_gpr(dc, rs2);
3915                     cpu_tmp0 = tcg_temp_new();
3916                     if (insn & (1 << 12)) {
3917                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3918                     } else {
3919                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3920                     }
3921                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3922                 }
3923                 gen_store_gpr(dc, rd, cpu_dst);
3924             } else if (xop == 0x26) { /* srl, V9 srlx */
3925                 cpu_src1 = get_src1(dc, insn);
3926                 if (IS_IMM) {   /* immediate */
3927                     simm = GET_FIELDs(insn, 20, 31);
3928                     if (insn & (1 << 12)) {
3929                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3930                     } else {
3931                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3932                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3933                     }
3934                 } else {                /* register */
3935                     rs2 = GET_FIELD(insn, 27, 31);
3936                     cpu_src2 = gen_load_gpr(dc, rs2);
3937                     cpu_tmp0 = tcg_temp_new();
3938                     if (insn & (1 << 12)) {
3939                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3940                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3941                     } else {
3942                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3943                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3944                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3945                     }
3946                 }
3947                 gen_store_gpr(dc, rd, cpu_dst);
3948             } else if (xop == 0x27) { /* sra, V9 srax */
3949                 cpu_src1 = get_src1(dc, insn);
3950                 if (IS_IMM) {   /* immediate */
3951                     simm = GET_FIELDs(insn, 20, 31);
3952                     if (insn & (1 << 12)) {
3953                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3954                     } else {
3955                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3956                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3957                     }
3958                 } else {                /* register */
3959                     rs2 = GET_FIELD(insn, 27, 31);
3960                     cpu_src2 = gen_load_gpr(dc, rs2);
3961                     cpu_tmp0 = tcg_temp_new();
3962                     if (insn & (1 << 12)) {
3963                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3964                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3965                     } else {
3966                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3967                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3968                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3969                     }
3970                 }
3971                 gen_store_gpr(dc, rd, cpu_dst);
3972 #endif
3973             } else if (xop < 0x36) {
3974                 if (xop < 0x20) {
3975                     cpu_src1 = get_src1(dc, insn);
3976                     cpu_src2 = get_src2(dc, insn);
3977                     switch (xop & ~0x10) {
3978                     case 0x0: /* add */
3979                         if (xop & 0x10) {
3980                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3981                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3982                             dc->cc_op = CC_OP_ADD;
3983                         } else {
3984                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3985                         }
3986                         break;
3987                     case 0x1: /* and */
3988                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3989                         if (xop & 0x10) {
3990                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3991                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3992                             dc->cc_op = CC_OP_LOGIC;
3993                         }
3994                         break;
3995                     case 0x2: /* or */
3996                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3997                         if (xop & 0x10) {
3998                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3999                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4000                             dc->cc_op = CC_OP_LOGIC;
4001                         }
4002                         break;
4003                     case 0x3: /* xor */
4004                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
4005                         if (xop & 0x10) {
4006                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4007                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4008                             dc->cc_op = CC_OP_LOGIC;
4009                         }
4010                         break;
4011                     case 0x4: /* sub */
4012                         if (xop & 0x10) {
4013                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4014                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
4015                             dc->cc_op = CC_OP_SUB;
4016                         } else {
4017                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
4018                         }
4019                         break;
4020                     case 0x5: /* andn */
4021                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4022                         if (xop & 0x10) {
4023                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4024                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4025                             dc->cc_op = CC_OP_LOGIC;
4026                         }
4027                         break;
4028                     case 0x6: /* orn */
4029                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4030                         if (xop & 0x10) {
4031                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4032                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4033                             dc->cc_op = CC_OP_LOGIC;
4034                         }
4035                         break;
4036                     case 0x7: /* xorn */
4037                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4038                         if (xop & 0x10) {
4039                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4040                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4041                             dc->cc_op = CC_OP_LOGIC;
4042                         }
4043                         break;
4044                     case 0x8: /* addx, V9 addc */
4045                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4046                                         (xop & 0x10));
4047                         break;
4048 #ifdef TARGET_SPARC64
4049                     case 0x9: /* V9 mulx */
4050                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4051                         break;
4052 #endif
4053                     case 0xa: /* umul */
4054                         CHECK_IU_FEATURE(dc, MUL);
4055                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4056                         if (xop & 0x10) {
4057                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4058                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4059                             dc->cc_op = CC_OP_LOGIC;
4060                         }
4061                         break;
4062                     case 0xb: /* smul */
4063                         CHECK_IU_FEATURE(dc, MUL);
4064                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4065                         if (xop & 0x10) {
4066                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4067                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4068                             dc->cc_op = CC_OP_LOGIC;
4069                         }
4070                         break;
4071                     case 0xc: /* subx, V9 subc */
4072                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4073                                         (xop & 0x10));
4074                         break;
4075 #ifdef TARGET_SPARC64
4076                     case 0xd: /* V9 udivx */
4077                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
4078                         break;
4079 #endif
4080                     case 0xe: /* udiv */
4081                         CHECK_IU_FEATURE(dc, DIV);
4082                         if (xop & 0x10) {
4083                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
4084                                                cpu_src2);
4085                             dc->cc_op = CC_OP_DIV;
4086                         } else {
4087                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
4088                                             cpu_src2);
4089                         }
4090                         break;
4091                     case 0xf: /* sdiv */
4092                         CHECK_IU_FEATURE(dc, DIV);
4093                         if (xop & 0x10) {
4094                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
4095                                                cpu_src2);
4096                             dc->cc_op = CC_OP_DIV;
4097                         } else {
4098                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
4099                                             cpu_src2);
4100                         }
4101                         break;
4102                     default:
4103                         goto illegal_insn;
4104                     }
4105                     gen_store_gpr(dc, rd, cpu_dst);
4106                 } else {
4107                     cpu_src1 = get_src1(dc, insn);
4108                     cpu_src2 = get_src2(dc, insn);
4109                     switch (xop) {
4110                     case 0x20: /* taddcc */
4111                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4112                         gen_store_gpr(dc, rd, cpu_dst);
4113                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4114                         dc->cc_op = CC_OP_TADD;
4115                         break;
4116                     case 0x21: /* tsubcc */
4117                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4118                         gen_store_gpr(dc, rd, cpu_dst);
4119                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4120                         dc->cc_op = CC_OP_TSUB;
4121                         break;
4122                     case 0x22: /* taddcctv */
4123                         gen_helper_taddcctv(cpu_dst, tcg_env,
4124                                             cpu_src1, cpu_src2);
4125                         gen_store_gpr(dc, rd, cpu_dst);
4126                         dc->cc_op = CC_OP_TADDTV;
4127                         break;
4128                     case 0x23: /* tsubcctv */
4129                         gen_helper_tsubcctv(cpu_dst, tcg_env,
4130                                             cpu_src1, cpu_src2);
4131                         gen_store_gpr(dc, rd, cpu_dst);
4132                         dc->cc_op = CC_OP_TSUBTV;
4133                         break;
4134                     case 0x24: /* mulscc */
4135                         update_psr(dc);
4136                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4137                         gen_store_gpr(dc, rd, cpu_dst);
4138                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4139                         dc->cc_op = CC_OP_ADD;
4140                         break;
4141 #ifndef TARGET_SPARC64
4142                     case 0x25:  /* sll */
4143                         if (IS_IMM) { /* immediate */
4144                             simm = GET_FIELDs(insn, 20, 31);
4145                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4146                         } else { /* register */
4147                             cpu_tmp0 = tcg_temp_new();
4148                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4149                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4150                         }
4151                         gen_store_gpr(dc, rd, cpu_dst);
4152                         break;
4153                     case 0x26:  /* srl */
4154                         if (IS_IMM) { /* immediate */
4155                             simm = GET_FIELDs(insn, 20, 31);
4156                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4157                         } else { /* register */
4158                             cpu_tmp0 = tcg_temp_new();
4159                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4160                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4161                         }
4162                         gen_store_gpr(dc, rd, cpu_dst);
4163                         break;
4164                     case 0x27:  /* sra */
4165                         if (IS_IMM) { /* immediate */
4166                             simm = GET_FIELDs(insn, 20, 31);
4167                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4168                         } else { /* register */
4169                             cpu_tmp0 = tcg_temp_new();
4170                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4171                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4172                         }
4173                         gen_store_gpr(dc, rd, cpu_dst);
4174                         break;
4175 #endif
4176                     case 0x30:
4177                         {
4178                             cpu_tmp0 = tcg_temp_new();
4179                             switch(rd) {
4180                             case 0: /* wry */
4181                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4182                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4183                                 break;
4184 #ifndef TARGET_SPARC64
4185                             case 0x01 ... 0x0f: /* undefined in the
4186                                                    SPARCv8 manual, nop
4187                                                    on the microSPARC
4188                                                    II */
4189                             case 0x10 ... 0x1f: /* implementation-dependent
4190                                                    in the SPARCv8
4191                                                    manual, nop on the
4192                                                    microSPARC II */
4193                                 if ((rd == 0x13) && (dc->def->features &
4194                                                      CPU_FEATURE_POWERDOWN)) {
4195                                     /* LEON3 power-down */
4196                                     save_state(dc);
4197                                     gen_helper_power_down(tcg_env);
4198                                 }
4199                                 break;
4200 #else
4201                             case 0x2: /* V9 wrccr */
4202                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4203                                 gen_helper_wrccr(tcg_env, cpu_tmp0);
4204                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4205                                 dc->cc_op = CC_OP_FLAGS;
4206                                 break;
4207                             case 0x3: /* V9 wrasi */
4208                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4209                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4210                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4211                                                 offsetof(CPUSPARCState, asi));
4212                                 /*
4213                                  * End TB to notice changed ASI.
4214                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4215                                  * update DisasContext and not exit the TB.
4216                                  */
4217                                 save_state(dc);
4218                                 gen_op_next_insn();
4219                                 tcg_gen_lookup_and_goto_ptr();
4220                                 dc->base.is_jmp = DISAS_NORETURN;
4221                                 break;
4222                             case 0x6: /* V9 wrfprs */
4223                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4224                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4225                                 dc->fprs_dirty = 0;
4226                                 save_state(dc);
4227                                 gen_op_next_insn();
4228                                 tcg_gen_exit_tb(NULL, 0);
4229                                 dc->base.is_jmp = DISAS_NORETURN;
4230                                 break;
4231                             case 0xf: /* V9 sir, nop if user */
4232 #if !defined(CONFIG_USER_ONLY)
4233                                 if (supervisor(dc)) {
4234                                     ; // XXX
4235                                 }
4236 #endif
4237                                 break;
4238                             case 0x13: /* Graphics Status */
4239                                 if (gen_trap_ifnofpu(dc)) {
4240                                     goto jmp_insn;
4241                                 }
4242                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4243                                 break;
4244                             case 0x14: /* Softint set */
4245                                 if (!supervisor(dc))
4246                                     goto illegal_insn;
4247                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4248                                 gen_helper_set_softint(tcg_env, cpu_tmp0);
4249                                 break;
4250                             case 0x15: /* Softint clear */
4251                                 if (!supervisor(dc))
4252                                     goto illegal_insn;
4253                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4254                                 gen_helper_clear_softint(tcg_env, cpu_tmp0);
4255                                 break;
4256                             case 0x16: /* Softint write */
4257                                 if (!supervisor(dc))
4258                                     goto illegal_insn;
4259                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4260                                 gen_helper_write_softint(tcg_env, cpu_tmp0);
4261                                 break;
4262                             case 0x17: /* Tick compare */
4263 #if !defined(CONFIG_USER_ONLY)
4264                                 if (!supervisor(dc))
4265                                     goto illegal_insn;
4266 #endif
4267                                 {
4268                                     TCGv_ptr r_tickptr;
4269 
4270                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4271                                                    cpu_src2);
4272                                     r_tickptr = tcg_temp_new_ptr();
4273                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4274                                                    offsetof(CPUSPARCState, tick));
4275                                     translator_io_start(&dc->base);
4276                                     gen_helper_tick_set_limit(r_tickptr,
4277                                                               cpu_tick_cmpr);
4278                                     /* End TB to handle timer interrupt */
4279                                     dc->base.is_jmp = DISAS_EXIT;
4280                                 }
4281                                 break;
4282                             case 0x18: /* System tick */
4283 #if !defined(CONFIG_USER_ONLY)
4284                                 if (!supervisor(dc))
4285                                     goto illegal_insn;
4286 #endif
4287                                 {
4288                                     TCGv_ptr r_tickptr;
4289 
4290                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4291                                                    cpu_src2);
4292                                     r_tickptr = tcg_temp_new_ptr();
4293                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4294                                                    offsetof(CPUSPARCState, stick));
4295                                     translator_io_start(&dc->base);
4296                                     gen_helper_tick_set_count(r_tickptr,
4297                                                               cpu_tmp0);
4298                                     /* End TB to handle timer interrupt */
4299                                     dc->base.is_jmp = DISAS_EXIT;
4300                                 }
4301                                 break;
4302                             case 0x19: /* System tick compare */
4303 #if !defined(CONFIG_USER_ONLY)
4304                                 if (!supervisor(dc))
4305                                     goto illegal_insn;
4306 #endif
4307                                 {
4308                                     TCGv_ptr r_tickptr;
4309 
4310                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4311                                                    cpu_src2);
4312                                     r_tickptr = tcg_temp_new_ptr();
4313                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4314                                                    offsetof(CPUSPARCState, stick));
4315                                     translator_io_start(&dc->base);
4316                                     gen_helper_tick_set_limit(r_tickptr,
4317                                                               cpu_stick_cmpr);
4318                                     /* End TB to handle timer interrupt */
4319                                     dc->base.is_jmp = DISAS_EXIT;
4320                                 }
4321                                 break;
4322 
4323                             case 0x10: /* Performance Control */
4324                             case 0x11: /* Performance Instrumentation
4325                                           Counter */
4326                             case 0x12: /* Dispatch Control */
4327 #endif
4328                             default:
4329                                 goto illegal_insn;
4330                             }
4331                         }
4332                         break;
4333 #if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* On V9 this opcode is SAVED/RESTORED (plus the
                               unimplemented UA2005 window ops), selected by
                               the rd field.  */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(tcg_env);
                                break;
                            case 1:
                                gen_helper_restored(tcg_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrpsr: value written is rs1 ^ rs2.  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(tcg_env, cpu_tmp0);
                            /* PSR contains the icc condition codes: switch
                               the cc state back to the flags representation. */
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* wrpsr may change CWP/PIL etc., so end the TB
                               and re-enter the translator.  */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* Value written is rs1 ^ rs2 (wr semantics).  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register.  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    /* Store into the trap state for the
                                       current trap level.  */
                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    /* tt is a 32-bit field in trap_state.  */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* Timer write is an I/O side effect.  */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* pstate affects how subsequent insns
                                   translate; force npc re-evaluation.  */
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* PIL change can unmask an interrupt.  */
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                /* Hyperprivileged only.  */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrwim: only nwindows bits are implemented. */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8 wrtbr: supervisor-only, value rs1 ^ rs2.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hyperprivileged register write,
                               register selected by rd.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* hpstate affects translation mode: end the
                                   TB and re-enter the translator.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    /* Timer write is an I/O side effect.  */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
4556 #endif
4557 #ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 selects integer vs floating-point
                               condition codes; cc then picks icc/xcc
                               (only 0 and 2 are valid) or fcc0..fcc3.  */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* Conditional move: keep the old rd value when
                               the condition is false.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* 64-bit signed divide; helper handles the trap
                           cases (division by zero).  */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of rs2 only; rs1 is unused.  */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* movr tests rs1 against zero, not the condition
                               codes.  */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* Conditional move: keep the old rd value when
                               the condition is false.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
4623 #endif
4624                     default:
4625                         goto illegal_insn;
4626                     }
4627                 }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                /* VIS operations are selected by the 9-bit opf field.
                   For the edge ops below, the trailing gen_edge arguments
                   follow the mnemonic edge<width>[l]{cc,n}: element width
                   in bits, whether condition codes are set, and the
                   "l" (left/little) variant -- NOTE(review): inferred from
                   the case pattern; confirm against gen_edge's signature. */
                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array16 is the array8 address scaled by 2.  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* array32 is the array8 address scaled by 4.  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* Final argument selects the little-endian variant --
                       NOTE(review): confirm against gen_alignaddr.  */
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* The sum is written to rd and also deposited into
                       GSR[63:32] (presumably the mask consumed by
                       bshuffle; verify against gen_helper_bshuffle).  */
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS partitioned compares: operands are 64-bit fp
                   registers, the result lands in integer register rd.  */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* In the gen_*_fop_XXX wrappers, D is a 64-bit and F a
                   32-bit fp register operand (cf. gen_load_fpr_D /
                   gen_dest_fpr_F above); the gen_gsr_ variants also pass
                   the GSR to the helper.  */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Single-operand op: the source is rs2 (rs1 unused),
                       scaled by GSR.scale, producing a 32-bit result.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Like fpack16: source is rs2, uses the GSR.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* pdist accumulates into rd (DDDD: rd is also an
                       input).  */
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* 32-bit partitioned add is a plain 32-bit add.  */
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Constant generators: no source operands.  */
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
4941                 case 0x064: /* VIS I fandnot2 */
4942                     CHECK_FPU_FEATURE(dc, VIS1);
4943                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
4944                     break;
4945                 case 0x065: /* VIS I fandnot2s */
4946                     CHECK_FPU_FEATURE(dc, VIS1);
4947                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
4948                     break;
4949                 case 0x066: /* VIS I fnot2 */
4950                     CHECK_FPU_FEATURE(dc, VIS1);
4951                     gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
4952                     break;
4953                 case 0x067: /* VIS I fnot2s */
4954                     CHECK_FPU_FEATURE(dc, VIS1);
4955                     gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
4956                     break;
4957                 case 0x068: /* VIS I fandnot1 */
4958                     CHECK_FPU_FEATURE(dc, VIS1);
4959                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
4960                     break;
4961                 case 0x069: /* VIS I fandnot1s */
4962                     CHECK_FPU_FEATURE(dc, VIS1);
4963                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4964                     break;
4965                 case 0x06a: /* VIS I fnot1 */
4966                     CHECK_FPU_FEATURE(dc, VIS1);
4967                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4968                     break;
4969                 case 0x06b: /* VIS I fnot1s */
4970                     CHECK_FPU_FEATURE(dc, VIS1);
4971                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4972                     break;
4973                 case 0x06c: /* VIS I fxor */
4974                     CHECK_FPU_FEATURE(dc, VIS1);
4975                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4976                     break;
4977                 case 0x06d: /* VIS I fxors */
4978                     CHECK_FPU_FEATURE(dc, VIS1);
4979                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4980                     break;
4981                 case 0x06e: /* VIS I fnand */
4982                     CHECK_FPU_FEATURE(dc, VIS1);
4983                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4984                     break;
4985                 case 0x06f: /* VIS I fnands */
4986                     CHECK_FPU_FEATURE(dc, VIS1);
4987                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4988                     break;
4989                 case 0x070: /* VIS I fand */
4990                     CHECK_FPU_FEATURE(dc, VIS1);
4991                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4992                     break;
4993                 case 0x071: /* VIS I fands */
4994                     CHECK_FPU_FEATURE(dc, VIS1);
4995                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4996                     break;
4997                 case 0x072: /* VIS I fxnor */
4998                     CHECK_FPU_FEATURE(dc, VIS1);
4999                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
5000                     break;
5001                 case 0x073: /* VIS I fxnors */
5002                     CHECK_FPU_FEATURE(dc, VIS1);
5003                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
5004                     break;
5005                 case 0x074: /* VIS I fsrc1 */
5006                     CHECK_FPU_FEATURE(dc, VIS1);
5007                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
5008                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5009                     break;
5010                 case 0x075: /* VIS I fsrc1s */
5011                     CHECK_FPU_FEATURE(dc, VIS1);
5012                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
5013                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5014                     break;
5015                 case 0x076: /* VIS I fornot2 */
5016                     CHECK_FPU_FEATURE(dc, VIS1);
5017                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
5018                     break;
5019                 case 0x077: /* VIS I fornot2s */
5020                     CHECK_FPU_FEATURE(dc, VIS1);
5021                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5022                     break;
5023                 case 0x078: /* VIS I fsrc2 */
5024                     CHECK_FPU_FEATURE(dc, VIS1);
5025                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5026                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5027                     break;
5028                 case 0x079: /* VIS I fsrc2s */
5029                     CHECK_FPU_FEATURE(dc, VIS1);
5030                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5031                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5032                     break;
5033                 case 0x07a: /* VIS I fornot1 */
5034                     CHECK_FPU_FEATURE(dc, VIS1);
5035                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5036                     break;
5037                 case 0x07b: /* VIS I fornot1s */
5038                     CHECK_FPU_FEATURE(dc, VIS1);
5039                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5040                     break;
5041                 case 0x07c: /* VIS I for */
5042                     CHECK_FPU_FEATURE(dc, VIS1);
5043                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5044                     break;
5045                 case 0x07d: /* VIS I fors */
5046                     CHECK_FPU_FEATURE(dc, VIS1);
5047                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5048                     break;
5049                 case 0x07e: /* VIS I fone */
5050                     CHECK_FPU_FEATURE(dc, VIS1);
5051                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5052                     tcg_gen_movi_i64(cpu_dst_64, -1);
5053                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5054                     break;
5055                 case 0x07f: /* VIS I fones */
5056                     CHECK_FPU_FEATURE(dc, VIS1);
5057                     cpu_dst_32 = gen_dest_fpr_F(dc);
5058                     tcg_gen_movi_i32(cpu_dst_32, -1);
5059                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5060                     break;
5061                 case 0x080: /* VIS I shutdown */
5062                 case 0x081: /* VIS II siam */
5063                     // XXX
5064                     goto illegal_insn;
5065                 default:
5066                     goto illegal_insn;
5067                 }
5068 #else
5069                 goto ncp_insn;
5070 #endif
5071             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5072 #ifdef TARGET_SPARC64
5073                 goto illegal_insn;
5074 #else
5075                 goto ncp_insn;
5076 #endif
5077 #ifdef TARGET_SPARC64
5078             } else if (xop == 0x39) { /* V9 return */
5079                 save_state(dc);
5080                 cpu_src1 = get_src1(dc, insn);
5081                 cpu_tmp0 = tcg_temp_new();
5082                 if (IS_IMM) {   /* immediate */
5083                     simm = GET_FIELDs(insn, 19, 31);
5084                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5085                 } else {                /* register */
5086                     rs2 = GET_FIELD(insn, 27, 31);
5087                     if (rs2) {
5088                         cpu_src2 = gen_load_gpr(dc, rs2);
5089                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5090                     } else {
5091                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5092                     }
5093                 }
5094                 gen_check_align(dc, cpu_tmp0, 3);
5095                 gen_helper_restore(tcg_env);
5096                 gen_mov_pc_npc(dc);
5097                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5098                 dc->npc = DYNAMIC_PC_LOOKUP;
5099                 goto jmp_insn;
5100 #endif
5101             } else {
5102                 cpu_src1 = get_src1(dc, insn);
5103                 cpu_tmp0 = tcg_temp_new();
5104                 if (IS_IMM) {   /* immediate */
5105                     simm = GET_FIELDs(insn, 19, 31);
5106                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5107                 } else {                /* register */
5108                     rs2 = GET_FIELD(insn, 27, 31);
5109                     if (rs2) {
5110                         cpu_src2 = gen_load_gpr(dc, rs2);
5111                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5112                     } else {
5113                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5114                     }
5115                 }
5116                 switch (xop) {
5117                 case 0x38:      /* jmpl */
5118                     {
5119                         gen_check_align(dc, cpu_tmp0, 3);
5120                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5121                         gen_mov_pc_npc(dc);
5122                         gen_address_mask(dc, cpu_tmp0);
5123                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5124                         dc->npc = DYNAMIC_PC_LOOKUP;
5125                     }
5126                     goto jmp_insn;
5127 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5128                 case 0x39:      /* rett, V9 return */
5129                     {
5130                         if (!supervisor(dc))
5131                             goto priv_insn;
5132                         gen_check_align(dc, cpu_tmp0, 3);
5133                         gen_mov_pc_npc(dc);
5134                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5135                         dc->npc = DYNAMIC_PC;
5136                         gen_helper_rett(tcg_env);
5137                     }
5138                     goto jmp_insn;
5139 #endif
5140                 case 0x3b: /* flush */
5141                     /* nop */
5142                     break;
5143                 case 0x3c:      /* save */
5144                     gen_helper_save(tcg_env);
5145                     gen_store_gpr(dc, rd, cpu_tmp0);
5146                     break;
5147                 case 0x3d:      /* restore */
5148                     gen_helper_restore(tcg_env);
5149                     gen_store_gpr(dc, rd, cpu_tmp0);
5150                     break;
5151 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5152                 case 0x3e:      /* V9 done/retry */
5153                     {
5154                         switch (rd) {
5155                         case 0:
5156                             if (!supervisor(dc))
5157                                 goto priv_insn;
5158                             dc->npc = DYNAMIC_PC;
5159                             dc->pc = DYNAMIC_PC;
5160                             translator_io_start(&dc->base);
5161                             gen_helper_done(tcg_env);
5162                             goto jmp_insn;
5163                         case 1:
5164                             if (!supervisor(dc))
5165                                 goto priv_insn;
5166                             dc->npc = DYNAMIC_PC;
5167                             dc->pc = DYNAMIC_PC;
5168                             translator_io_start(&dc->base);
5169                             gen_helper_retry(tcg_env);
5170                             goto jmp_insn;
5171                         default:
5172                             goto illegal_insn;
5173                         }
5174                     }
5175                     break;
5176 #endif
5177                 default:
5178                     goto illegal_insn;
5179                 }
5180             }
5181             break;
5182         }
5183         break;
    case 3:                     /* load/store instructions */
        {
            unsigned int xop = GET_FIELD(insn, 7, 12);
            /* ??? gen_address_mask prevents us from using a source
               register directly.  Always generate a temporary.  */
            TCGv cpu_addr = tcg_temp_new();

            /* Form the effective address: rs1 + (simm13 | rs2).  */
            tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
            if (xop == 0x3c || xop == 0x3e) {
                /* V9 casa/casxa : no offset */
            } else if (IS_IMM) {     /* immediate */
                simm = GET_FIELDs(insn, 19, 31);
                if (simm != 0) {
                    /* Skip the add for a zero displacement.  */
                    tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
                }
            } else {            /* register */
                rs2 = GET_FIELD(insn, 27, 31);
                if (rs2 != 0) {
                    /* rs2 == %g0 reads as zero; skip the add entirely.  */
                    tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
                }
            }
5205             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5206                 (xop > 0x17 && xop <= 0x1d ) ||
5207                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5208                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5209 
5210                 switch (xop) {
5211                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5212                     gen_address_mask(dc, cpu_addr);
5213                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5214                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5215                     break;
5216                 case 0x1:       /* ldub, load unsigned byte */
5217                     gen_address_mask(dc, cpu_addr);
5218                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5219                                        dc->mem_idx, MO_UB);
5220                     break;
5221                 case 0x2:       /* lduh, load unsigned halfword */
5222                     gen_address_mask(dc, cpu_addr);
5223                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5224                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5225                     break;
5226                 case 0x3:       /* ldd, load double word */
5227                     if (rd & 1)
5228                         goto illegal_insn;
5229                     else {
5230                         TCGv_i64 t64;
5231 
5232                         gen_address_mask(dc, cpu_addr);
5233                         t64 = tcg_temp_new_i64();
5234                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5235                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5236                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5237                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5238                         gen_store_gpr(dc, rd + 1, cpu_val);
5239                         tcg_gen_shri_i64(t64, t64, 32);
5240                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5241                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5242                     }
5243                     break;
5244                 case 0x9:       /* ldsb, load signed byte */
5245                     gen_address_mask(dc, cpu_addr);
5246                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5247                     break;
5248                 case 0xa:       /* ldsh, load signed halfword */
5249                     gen_address_mask(dc, cpu_addr);
5250                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5251                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5252                     break;
5253                 case 0xd:       /* ldstub */
5254                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5255                     break;
5256                 case 0x0f:
5257                     /* swap, swap register with memory. Also atomically */
5258                     cpu_src1 = gen_load_gpr(dc, rd);
5259                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5260                              dc->mem_idx, MO_TEUL);
5261                     break;
5262 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5263                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5264                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5265                     break;
5266                 case 0x11:      /* lduba, load unsigned byte alternate */
5267                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5268                     break;
5269                 case 0x12:      /* lduha, load unsigned halfword alternate */
5270                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5271                     break;
5272                 case 0x13:      /* ldda, load double word alternate */
5273                     if (rd & 1) {
5274                         goto illegal_insn;
5275                     }
5276                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5277                     goto skip_move;
5278                 case 0x19:      /* ldsba, load signed byte alternate */
5279                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5280                     break;
5281                 case 0x1a:      /* ldsha, load signed halfword alternate */
5282                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5283                     break;
5284                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5285                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5286                     break;
5287                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5288                                    atomically */
5289                     cpu_src1 = gen_load_gpr(dc, rd);
5290                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5291                     break;
5292 
5293 #ifndef TARGET_SPARC64
5294                 case 0x30: /* ldc */
5295                 case 0x31: /* ldcsr */
5296                 case 0x33: /* lddc */
5297                     goto ncp_insn;
5298 #endif
5299 #endif
5300 #ifdef TARGET_SPARC64
5301                 case 0x08: /* V9 ldsw */
5302                     gen_address_mask(dc, cpu_addr);
5303                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5304                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5305                     break;
5306                 case 0x0b: /* V9 ldx */
5307                     gen_address_mask(dc, cpu_addr);
5308                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5309                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5310                     break;
5311                 case 0x18: /* V9 ldswa */
5312                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5313                     break;
5314                 case 0x1b: /* V9 ldxa */
5315                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5316                     break;
5317                 case 0x2d: /* V9 prefetch, no effect */
5318                     goto skip_move;
5319                 case 0x30: /* V9 ldfa */
5320                     if (gen_trap_ifnofpu(dc)) {
5321                         goto jmp_insn;
5322                     }
5323                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5324                     gen_update_fprs_dirty(dc, rd);
5325                     goto skip_move;
5326                 case 0x33: /* V9 lddfa */
5327                     if (gen_trap_ifnofpu(dc)) {
5328                         goto jmp_insn;
5329                     }
5330                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5331                     gen_update_fprs_dirty(dc, DFPREG(rd));
5332                     goto skip_move;
5333                 case 0x3d: /* V9 prefetcha, no effect */
5334                     goto skip_move;
5335                 case 0x32: /* V9 ldqfa */
5336                     CHECK_FPU_FEATURE(dc, FLOAT128);
5337                     if (gen_trap_ifnofpu(dc)) {
5338                         goto jmp_insn;
5339                     }
5340                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5341                     gen_update_fprs_dirty(dc, QFPREG(rd));
5342                     goto skip_move;
5343 #endif
5344                 default:
5345                     goto illegal_insn;
5346                 }
5347                 gen_store_gpr(dc, rd, cpu_val);
5348 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5349             skip_move: ;
5350 #endif
5351             } else if (xop >= 0x20 && xop < 0x24) {
5352                 if (gen_trap_ifnofpu(dc)) {
5353                     goto jmp_insn;
5354                 }
5355                 switch (xop) {
5356                 case 0x20:      /* ldf, load fpreg */
5357                     gen_address_mask(dc, cpu_addr);
5358                     cpu_dst_32 = gen_dest_fpr_F(dc);
5359                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5360                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5361                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5362                     break;
5363                 case 0x21:      /* ldfsr, V9 ldxfsr */
5364 #ifdef TARGET_SPARC64
5365                     gen_address_mask(dc, cpu_addr);
5366                     if (rd == 1) {
5367                         TCGv_i64 t64 = tcg_temp_new_i64();
5368                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5369                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5370                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5371                         break;
5372                     }
5373 #endif
5374                     cpu_dst_32 = tcg_temp_new_i32();
5375                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5376                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5377                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5378                     break;
5379                 case 0x22:      /* ldqf, load quad fpreg */
5380                     CHECK_FPU_FEATURE(dc, FLOAT128);
5381                     gen_address_mask(dc, cpu_addr);
5382                     cpu_src1_64 = tcg_temp_new_i64();
5383                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5384                                         MO_TEUQ | MO_ALIGN_4);
5385                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5386                     cpu_src2_64 = tcg_temp_new_i64();
5387                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5388                                         MO_TEUQ | MO_ALIGN_4);
5389                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5390                     break;
5391                 case 0x23:      /* lddf, load double fpreg */
5392                     gen_address_mask(dc, cpu_addr);
5393                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5394                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5395                                         MO_TEUQ | MO_ALIGN_4);
5396                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5397                     break;
5398                 default:
5399                     goto illegal_insn;
5400                 }
5401             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5402                        xop == 0xe || xop == 0x1e) {
5403                 TCGv cpu_val = gen_load_gpr(dc, rd);
5404 
5405                 switch (xop) {
5406                 case 0x4: /* st, store word */
5407                     gen_address_mask(dc, cpu_addr);
5408                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5409                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5410                     break;
5411                 case 0x5: /* stb, store byte */
5412                     gen_address_mask(dc, cpu_addr);
5413                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5414                     break;
5415                 case 0x6: /* sth, store halfword */
5416                     gen_address_mask(dc, cpu_addr);
5417                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5418                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5419                     break;
5420                 case 0x7: /* std, store double word */
5421                     if (rd & 1)
5422                         goto illegal_insn;
5423                     else {
5424                         TCGv_i64 t64;
5425                         TCGv lo;
5426 
5427                         gen_address_mask(dc, cpu_addr);
5428                         lo = gen_load_gpr(dc, rd + 1);
5429                         t64 = tcg_temp_new_i64();
5430                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5431                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5432                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5433                     }
5434                     break;
5435 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5436                 case 0x14: /* sta, V9 stwa, store word alternate */
5437                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5438                     break;
5439                 case 0x15: /* stba, store byte alternate */
5440                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5441                     break;
5442                 case 0x16: /* stha, store halfword alternate */
5443                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5444                     break;
5445                 case 0x17: /* stda, store double word alternate */
5446                     if (rd & 1) {
5447                         goto illegal_insn;
5448                     }
5449                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5450                     break;
5451 #endif
5452 #ifdef TARGET_SPARC64
5453                 case 0x0e: /* V9 stx */
5454                     gen_address_mask(dc, cpu_addr);
5455                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5456                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5457                     break;
5458                 case 0x1e: /* V9 stxa */
5459                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5460                     break;
5461 #endif
5462                 default:
5463                     goto illegal_insn;
5464                 }
5465             } else if (xop > 0x23 && xop < 0x28) {
5466                 if (gen_trap_ifnofpu(dc)) {
5467                     goto jmp_insn;
5468                 }
5469                 switch (xop) {
5470                 case 0x24: /* stf, store fpreg */
5471                     gen_address_mask(dc, cpu_addr);
5472                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5473                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5474                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5475                     break;
5476                 case 0x25: /* stfsr, V9 stxfsr */
5477                     {
5478 #ifdef TARGET_SPARC64
5479                         gen_address_mask(dc, cpu_addr);
5480                         if (rd == 1) {
5481                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5482                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5483                             break;
5484                         }
5485 #endif
5486                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5487                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5488                     }
5489                     break;
5490                 case 0x26:
5491 #ifdef TARGET_SPARC64
5492                     /* V9 stqf, store quad fpreg */
5493                     CHECK_FPU_FEATURE(dc, FLOAT128);
5494                     gen_address_mask(dc, cpu_addr);
5495                     /* ??? While stqf only requires 4-byte alignment, it is
5496                        legal for the cpu to signal the unaligned exception.
5497                        The OS trap handler is then required to fix it up.
5498                        For qemu, this avoids having to probe the second page
5499                        before performing the first write.  */
5500                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5501                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5502                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5503                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5504                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5505                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5506                                         dc->mem_idx, MO_TEUQ);
5507                     break;
5508 #else /* !TARGET_SPARC64 */
5509                     /* stdfq, store floating point queue */
5510 #if defined(CONFIG_USER_ONLY)
5511                     goto illegal_insn;
5512 #else
5513                     if (!supervisor(dc))
5514                         goto priv_insn;
5515                     if (gen_trap_ifnofpu(dc)) {
5516                         goto jmp_insn;
5517                     }
5518                     goto nfq_insn;
5519 #endif
5520 #endif
5521                 case 0x27: /* stdf, store double fpreg */
5522                     gen_address_mask(dc, cpu_addr);
5523                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5524                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5525                                         MO_TEUQ | MO_ALIGN_4);
5526                     break;
5527                 default:
5528                     goto illegal_insn;
5529                 }
5530             } else if (xop > 0x33 && xop < 0x3f) {
5531                 switch (xop) {
5532 #ifdef TARGET_SPARC64
5533                 case 0x34: /* V9 stfa */
5534                     if (gen_trap_ifnofpu(dc)) {
5535                         goto jmp_insn;
5536                     }
5537                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5538                     break;
5539                 case 0x36: /* V9 stqfa */
5540                     {
5541                         CHECK_FPU_FEATURE(dc, FLOAT128);
5542                         if (gen_trap_ifnofpu(dc)) {
5543                             goto jmp_insn;
5544                         }
5545                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5546                     }
5547                     break;
5548                 case 0x37: /* V9 stdfa */
5549                     if (gen_trap_ifnofpu(dc)) {
5550                         goto jmp_insn;
5551                     }
5552                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5553                     break;
5554                 case 0x3e: /* V9 casxa */
5555                     rs2 = GET_FIELD(insn, 27, 31);
5556                     cpu_src2 = gen_load_gpr(dc, rs2);
5557                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5558                     break;
5559 #else
5560                 case 0x34: /* stc */
5561                 case 0x35: /* stcsr */
5562                 case 0x36: /* stdcq */
5563                 case 0x37: /* stdc */
5564                     goto ncp_insn;
5565 #endif
5566 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5567                 case 0x3c: /* V9 or LEON3 casa */
5568 #ifndef TARGET_SPARC64
5569                     CHECK_IU_FEATURE(dc, CASA);
5570 #endif
5571                     rs2 = GET_FIELD(insn, 27, 31);
5572                     cpu_src2 = gen_load_gpr(dc, rs2);
5573                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5574                     break;
5575 #endif
5576                 default:
5577                     goto illegal_insn;
5578                 }
5579             } else {
5580                 goto illegal_insn;
5581             }
5582         }
5583         break;
5584     }
5585     advance_pc(dc);
5586  jmp_insn:
5587     return;
5588  illegal_insn:
5589     gen_exception(dc, TT_ILL_INSN);
5590     return;
5591 #if !defined(CONFIG_USER_ONLY)
5592  priv_insn:
5593     gen_exception(dc, TT_PRIV_INSN);
5594     return;
5595 #endif
5596  nfpu_insn:
5597     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5598     return;
5599 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5600  nfq_insn:
5601     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5602     return;
5603 #endif
5604 #ifndef TARGET_SPARC64
5605  ncp_insn:
5606     gen_exception(dc, TT_NCP_INSN);
5607     return;
5608 #endif
5609 }
5610 
5611 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5612 {
5613     DisasContext *dc = container_of(dcbase, DisasContext, base);
5614     CPUSPARCState *env = cpu_env(cs);
5615     int bound;
5616 
5617     dc->pc = dc->base.pc_first;
5618     dc->npc = (target_ulong)dc->base.tb->cs_base;
5619     dc->cc_op = CC_OP_DYNAMIC;
5620     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5621     dc->def = &env->def;
5622     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5623     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5624 #ifndef CONFIG_USER_ONLY
5625     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5626 #endif
5627 #ifdef TARGET_SPARC64
5628     dc->fprs_dirty = 0;
5629     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5630 #ifndef CONFIG_USER_ONLY
5631     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5632 #endif
5633 #endif
5634     /*
5635      * if we reach a page boundary, we stop generation so that the
5636      * PC of a TT_TFAULT exception is always in the right page
5637      */
5638     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5639     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5640 }
5641 
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* Nothing to do at TB start for SPARC; hook required by TranslatorOps. */
}
5645 
5646 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5647 {
5648     DisasContext *dc = container_of(dcbase, DisasContext, base);
5649     target_ulong npc = dc->npc;
5650 
5651     if (npc & 3) {
5652         switch (npc) {
5653         case JUMP_PC:
5654             assert(dc->jump_pc[1] == dc->pc + 4);
5655             npc = dc->jump_pc[0] | JUMP_PC;
5656             break;
5657         case DYNAMIC_PC:
5658         case DYNAMIC_PC_LOOKUP:
5659             npc = DYNAMIC_PC;
5660             break;
5661         default:
5662             g_assert_not_reached();
5663         }
5664     }
5665     tcg_gen_insn_start(dc->pc, npc);
5666 }
5667 
5668 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5669 {
5670     DisasContext *dc = container_of(dcbase, DisasContext, base);
5671     CPUSPARCState *env = cpu_env(cs);
5672     unsigned int insn;
5673 
5674     insn = translator_ldl(env, &dc->base, dc->pc);
5675     dc->base.pc_next += 4;
5676 
5677     if (!decode(dc, insn)) {
5678         disas_sparc_legacy(dc, insn);
5679     }
5680 
5681     if (dc->base.is_jmp == DISAS_NORETURN) {
5682         return;
5683     }
5684     if (dc->pc != dc->base.pc_next) {
5685         dc->base.is_jmp = DISAS_TOO_MANY;
5686     }
5687 }
5688 
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of pc/npc is dynamic (a misaligned value is one
         * of the DYNAMIC_PC/JUMP_PC markers, not an address).  Store any
         * static values into cpu_pc/cpu_npc, then either chain via the
         * TB-lookup helper or exit to the main loop.
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                /* cpu_pc already holds the value; lookup is allowed. */
                break;
            case DYNAMIC_PC:
                /* Fully dynamic PC: must return to the main loop. */
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* npc is one of two values per jump_pc[]; emit the select. */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /*
     * Emit the deferred exception paths collected during translation:
     * each entry restores pc (and npc, when it is a static address —
     * a dynamic npc was already stored), then raises the exception.
     */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5768 
static void sparc_tr_disas_log(const DisasContextBase *dcbase,
                               CPUState *cpu, FILE *logfile)
{
    /* Log the guest code of the translated block (-d in_asm). */
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
}
5775 
/* Hooks wiring the SPARC front end into the generic translator loop. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5784 
5785 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5786                            target_ulong pc, void *host_pc)
5787 {
5788     DisasContext dc = {};
5789 
5790     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5791 }
5792 
5793 void sparc_tcg_init(void)
5794 {
5795     static const char gregnames[32][4] = {
5796         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5797         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5798         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5799         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5800     };
5801     static const char fregnames[32][4] = {
5802         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5803         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5804         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5805         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5806     };
5807 
5808     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5809 #ifdef TARGET_SPARC64
5810         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5811         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5812 #else
5813         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5814 #endif
5815         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5816         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5817     };
5818 
5819     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5820 #ifdef TARGET_SPARC64
5821         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5822         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5823         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5824         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5825           "hstick_cmpr" },
5826         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5827         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5828         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5829         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5830         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5831 #endif
5832         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5833         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5834         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5835         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5836         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5837         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5838         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5839         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5840 #ifndef CONFIG_USER_ONLY
5841         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5842 #endif
5843     };
5844 
5845     unsigned int i;
5846 
5847     cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
5848                                          offsetof(CPUSPARCState, regwptr),
5849                                          "regwptr");
5850 
5851     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5852         *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
5853     }
5854 
5855     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5856         *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
5857     }
5858 
5859     cpu_regs[0] = NULL;
5860     for (i = 1; i < 8; ++i) {
5861         cpu_regs[i] = tcg_global_mem_new(tcg_env,
5862                                          offsetof(CPUSPARCState, gregs[i]),
5863                                          gregnames[i]);
5864     }
5865 
5866     for (i = 8; i < 32; ++i) {
5867         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5868                                          (i - 8) * sizeof(target_ulong),
5869                                          gregnames[i]);
5870     }
5871 
5872     for (i = 0; i < TARGET_DPREGS; i++) {
5873         cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
5874                                             offsetof(CPUSPARCState, fpr[i]),
5875                                             fregnames[i]);
5876     }
5877 }
5878 
5879 void sparc_restore_state_to_opc(CPUState *cs,
5880                                 const TranslationBlock *tb,
5881                                 const uint64_t *data)
5882 {
5883     SPARCCPU *cpu = SPARC_CPU(cs);
5884     CPUSPARCState *env = &cpu->env;
5885     target_ulong pc = data[0];
5886     target_ulong npc = data[1];
5887 
5888     env->pc = pc;
5889     if (npc == DYNAMIC_PC) {
5890         /* dynamic NPC: already stored */
5891     } else if (npc & JUMP_PC) {
5892         /* jump PC: use 'cond' and the jump targets of the translation */
5893         if (env->cond) {
5894             env->npc = npc & ~3;
5895         } else {
5896             env->npc = pc + 4;
5897         }
5898     } else {
5899         env->npc = npc;
5900     }
5901 }
5902