xref: /openbmc/qemu/target/sparc/translate.c (revision ab9ffe988a10dcc05e6e609dd721318b7a70bf23)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
48 /* global register indexes */
49 static TCGv_ptr cpu_regwptr;
50 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
51 static TCGv_i32 cpu_cc_op;
52 static TCGv_i32 cpu_psr;
53 static TCGv cpu_fsr, cpu_pc, cpu_npc;
54 static TCGv cpu_regs[32];
55 static TCGv cpu_y;
56 #ifndef CONFIG_USER_ONLY
57 static TCGv cpu_tbr;
58 #endif
59 static TCGv cpu_cond;
60 #ifdef TARGET_SPARC64
61 static TCGv_i32 cpu_xcc, cpu_fprs;
62 static TCGv cpu_gsr;
63 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
64 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
65 #else
66 static TCGv cpu_wim;
67 #endif
68 /* Floating point registers */
69 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
/*
 * A deferred exception: codegen for the raising path is emitted at the
 * end of the TB, reached via 'lab', so the fast path stays branch-free.
 */
typedef struct DisasDelayException {
    struct DisasDelayException *next;  /* singly-linked list head in DisasContext */
    TCGLabel *lab;                     /* label at which the raise code is emitted */
    TCGv_i32 excp;                     /* exception number to raise */
    /* Saved state at parent insn. */
    target_ulong pc;
    target_ulong npc;
} DisasDelayException;
79 
/* Per-translation-block decoder state. */
typedef struct DisasContext {
    DisasContextBase base;
    target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
    target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
    target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
    int mem_idx;             /* memory access index (presumably MMU index) — see callers */
    bool fpu_enabled;        /* FPU usable without trapping */
    bool address_mask_32bit; /* truncate addresses to 32 bits (see AM_CHECK) */
#ifndef CONFIG_USER_ONLY
    bool supervisor;
#ifdef TARGET_SPARC64
    bool hypervisor;
#endif
#endif

    uint32_t cc_op;  /* current CC operation */
    sparc_def_t *def;
#ifdef TARGET_SPARC64
    int fprs_dirty;  /* FPRS dirty bits already set within this TB */
    int asi;
#endif
    DisasDelayException *delay_excp_list; /* exceptions deferred to end of TB */
} DisasContext;
103 
/* A comparison reduced to a TCG condition over c1 vs c2. */
typedef struct {
    TCGCond cond;
    bool is_bool;  /* c1 already holds a 0/1 boolean result */
    TCGv c1, c2;
} DisasCompare;
109 
// This function uses non-native bit order: bit 0 is the MSB (big-endian
// bit numbering), as in instruction-encoding diagrams.
#define GET_FIELD(X, FROM, TO)                                  \
    ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))

// This function uses the order in the manuals, i.e. bit 0 is 2^0
#define GET_FIELD_SP(X, FROM, TO)               \
    GET_FIELD(X, 31 - (TO), 31 - (FROM))

/* Sign-extending variants of the field extractors. */
#define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
#define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))

#ifdef TARGET_SPARC64
/* Map a double/quad FP register number to its flat index (bit 0 selects
   the upper bank on sparc64). */
#define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
#define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
#else
#define DFPREG(r) (r & 0x1e)
#define QFPREG(r) (r & 0x1c)
#endif

#define UA2005_HTRAP_MASK 0xff
#define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low LEN bits of X to a full 32-bit int.
 * The left shift is done in unsigned arithmetic: shifting a signed int
 * so that set bits fall out of the sign position is undefined behavior,
 * while the unsigned shift is well defined.  The final arithmetic right
 * shift of the (possibly negative) value matches the original code's
 * reliance on implementation-defined arithmetic shift, which all
 * supported compilers provide.
 */
static int sign_extend(int x, int len)
{
    len = 32 - len;
    return (int)((unsigned int)x << len) >> len;
}
137 
138 #define IS_IMM (insn & (1<<13))
139 
/* Mark the FPRS dirty bit (DL for f0-f31, DU for f32-f63) covering RD. */
static void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
152 
/* floating point registers moves */

/* Load single-precision register SRC: odd regs live in the low half of
   the i64 pair, even regs in the high half. */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}
164 
/* Store V into single-precision register DST, preserving the other half
   of the containing i64 pair, and mark FPRS dirty. */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    /* Odd regs occupy bits [31:0], even regs bits [63:32]. */
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
174 
/* Allocate a temporary to receive a single-precision result. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
179 
/* Return the i64 global backing double-precision register SRC. */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
185 
/* Store V into double-precision register DST and mark FPRS dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
192 
/* Return the i64 global that will hold double-precision result DST. */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
/* Copy quad FP register pair SRC into the env scratch slot qt0. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
205 
/* Copy quad FP register pair SRC into the env scratch slot qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
213 
/* Copy the env scratch slot qt0 back into quad FP register pair DST. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
221 
/* Store the i64 pair (V1 high, V2 low) into quad register DST and mark
   FPRS dirty. */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
231 
#ifdef TARGET_SPARC64
/* Return the high (first) i64 half of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}

/* Return the low (second) i64 half of quad register SRC. */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}

/* Copy quad register RS to quad register RD and mark FPRS dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
#endif
255 
/* moves */

/* Privilege-level predicates: constant-folded away for user-only builds. */
#ifdef CONFIG_USER_ONLY
#define supervisor(dc) 0
#ifdef TARGET_SPARC64
#define hypervisor(dc) 0
#endif
#else
#ifdef TARGET_SPARC64
#define hypervisor(dc) (dc->hypervisor)
#define supervisor(dc) (dc->supervisor | dc->hypervisor)
#else
#define supervisor(dc) (dc->supervisor)
#endif
#endif

/* Whether addresses must be masked to 32 bits (PSTATE.AM). */
#if !defined(TARGET_SPARC64)
# define AM_CHECK(dc)  false
#elif defined(TARGET_ABI32)
# define AM_CHECK(dc)  true
#elif defined(CONFIG_USER_ONLY)
# define AM_CHECK(dc)  false
#else
# define AM_CHECK(dc)  ((dc)->address_mask_32bit)
#endif
280 
281 static void gen_address_mask(DisasContext *dc, TCGv addr)
282 {
283     if (AM_CHECK(dc)) {
284         tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
285     }
286 }
287 
288 static target_ulong address_mask_i(DisasContext *dc, target_ulong addr)
289 {
290     return AM_CHECK(dc) ? (uint32_t)addr : addr;
291 }
292 
293 static TCGv gen_load_gpr(DisasContext *dc, int reg)
294 {
295     if (reg > 0) {
296         assert(reg < 32);
297         return cpu_regs[reg];
298     } else {
299         TCGv t = tcg_temp_new();
300         tcg_gen_movi_tl(t, 0);
301         return t;
302     }
303 }
304 
305 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
306 {
307     if (reg > 0) {
308         assert(reg < 32);
309         tcg_gen_mov_tl(cpu_regs[reg], v);
310     }
311 }
312 
313 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
314 {
315     if (reg > 0) {
316         assert(reg < 32);
317         return cpu_regs[reg];
318     } else {
319         return tcg_temp_new();
320     }
321 }
322 
323 static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
324 {
325     return translator_use_goto_tb(&s->base, pc) &&
326            translator_use_goto_tb(&s->base, npc);
327 }
328 
/* Emit a jump to (PC, NPC): chained goto_tb when permitted, otherwise an
   indirect lookup through the TB hash. */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
345 
// XXX suboptimal
/* Extract the PSR negative (N) flag from SRC into 0/1 in REG. */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
352 
/* Extract the PSR zero (Z) flag from SRC into 0/1 in REG. */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
358 
/* Extract the PSR overflow (V) flag from SRC into 0/1 in REG. */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
364 
/* Extract the PSR carry (C) flag from SRC into 0/1 in REG. */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
370 
/* DST = SRC1 + SRC2, recording operands and result in cc_src/cc_src2/
   cc_dst for lazy condition-code evaluation. */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
378 
/* Recompute the 32-bit carry-out of the previous add recorded in the
   cc globals, as a 0/1 i32 value. */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
399 
/* Recompute the 32-bit borrow-out of the previous subtract recorded in
   the cc globals, as a 0/1 i32 value. */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
420 
/*
 * DST = SRC1 + SRC2 + icc.C (ADDX/ADDC), optionally updating the cc
 * globals for CC_OP_ADDX.  The carry-in is derived as cheaply as the
 * current lazy cc state allows: known-zero, recomputed from the previous
 * add/sub operands, or via the compute_C_icc helper as a last resort.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
485 
/* DST = SRC1 - SRC2, recording operands and result in the cc globals
   for lazy condition-code evaluation. */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
493 
/*
 * DST = SRC1 - SRC2 - icc.C (SUBX/SUBC), optionally updating the cc
 * globals for CC_OP_SUBX.  Mirrors gen_op_addx_int: the borrow-in is
 * derived from the current lazy cc state when possible, else via the
 * compute_C_icc helper.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
558 
/*
 * MULScc: one step of the multiply-step instruction.  Conditionally adds
 * SRC2 (zeroed when Y bit 0 is clear), shifts the N^V flag into SRC1's
 * top bit, and rotates SRC1's low bit into Y.  Leaves operands in the cc
 * globals for the caller's flag update.
 */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
597 
/* 32x32 -> 64 multiply: low 32 (or full 64 on sparc64) result in DST,
   high 32 bits in Y.  SIGN_EXT selects signed vs unsigned operands. */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
622 
/* UMUL: unsigned 32x32 multiply. */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
628 
/* SMUL: signed 32x32 multiply. */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
634 
// 1: branch always
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
640 
// Z: branch on equal
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
646 
// Z | (N ^ V): branch on less-or-equal (signed)
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}
657 
// N ^ V: branch on less (signed)
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}
666 
// C | Z: branch on less-or-equal (unsigned)
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}
675 
// C: branch on carry set (less, unsigned)
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
681 
// V: branch on overflow set
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
687 
// 0: branch never
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
693 
// N: branch on negative
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
699 
// !Z: branch on not-equal
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
706 
// !(Z | (N ^ V)): branch on greater (signed)
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
713 
// !(N ^ V): branch on greater-or-equal (signed)
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
720 
// !(C | Z): branch on greater (unsigned)
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
727 
// !C: branch on carry clear (greater-or-equal, unsigned)
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
734 
// !N: branch on positive
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
741 
// !V: branch on overflow clear
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
748 
/*
  FPSR bit field FCC1 | FCC0:
   0 =
   1 <
   2 >
   3 unordered
*/
/* Extract FCC0 of the condition-code set selected by FCC_OFFSET. */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
762 
/* Extract FCC1 of the condition-code set selected by FCC_OFFSET. */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
768 
// !0: FCC0 | FCC1 — not-equal
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}
777 
// 1 or 2: FCC0 ^ FCC1 — less or greater (ordered, not equal)
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}
786 
// 1 or 3: FCC0 — unordered or less
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
792 
// 1: FCC0 & !FCC1 — less
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}
801 
// 2 or 3: FCC1 — unordered or greater
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
807 
// 2: !FCC0 & FCC1 — greater
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}
816 
// 3: FCC0 & FCC1 — unordered
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}
825 
// 0: !(FCC0 | FCC1) — equal
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
835 
// 0 or 3: !(FCC0 ^ FCC1) — unordered or equal
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
845 
// 0 or 2: !FCC0 — greater or equal
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
852 
// !1: !(FCC0 & !FCC1) — unordered, greater, or equal
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
862 
// 0 or 1: !FCC1 — less or equal
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
869 
// !2: !(!FCC0 & FCC1) — unordered, less, or equal
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
879 
// !3: !(FCC0 & FCC1) — ordered
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
889 
/* Emit a two-way TB exit: continue at PC1 when R_COND is non-zero,
   else at PC2. */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
902 
/* Annulled conditional branch: when cpu_cond is true go to PC1 (the
   delay slot executes), otherwise skip the delay slot entirely. */
static void gen_branch_a(DisasContext *dc, target_ulong pc1)
{
    TCGLabel *l1 = gen_new_label();
    target_ulong npc = dc->npc;

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_cond, 0, l1);

    gen_goto_tb(dc, 0, npc, pc1);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, npc + 4, npc + 8);

    dc->base.is_jmp = DISAS_NORETURN;
}
917 
/* Non-annulled conditional branch: the delay slot always executes; the
   branch decision only selects the next npc.  With a static npc this is
   recorded lazily as JUMP_PC; with a dynamic npc the select is emitted
   immediately. */
static void gen_branch_n(DisasContext *dc, target_ulong pc1)
{
    target_ulong npc = dc->npc;

    if (npc & 3) {
        switch (npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
            tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc,
                               cpu_cond, tcg_constant_tl(0),
                               tcg_constant_tl(pc1), cpu_npc);
            dc->pc = npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = npc;
        dc->jump_pc[0] = pc1;
        dc->jump_pc[1] = npc + 4;
        dc->npc = JUMP_PC;
    }
}
943 
/* Materialize a pending JUMP_PC: select jump_pc[0] or jump_pc[1] into
   cpu_npc according to cpu_cond. */
static void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
952 
/* call this function before using the condition register as it may
   have been set for a jump */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
962 
/* Make cpu_npc reflect dc->npc: resolve a pending JUMP_PC, leave an
   already-dynamic npc alone, or store a static npc value. */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* cpu_npc already holds the correct value. */
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
981 
982 static void update_psr(DisasContext *dc)
983 {
984     if (dc->cc_op != CC_OP_FLAGS) {
985         dc->cc_op = CC_OP_FLAGS;
986         gen_helper_compute_psr(tcg_env);
987     }
988 }
989 
/* Commit the translator's pc/npc view to cpu_pc/cpu_npc. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
995 
/* Raise exception WHICH at the current insn and end the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
1002 
1003 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
1004 {
1005     DisasDelayException *e = g_new0(DisasDelayException, 1);
1006 
1007     e->next = dc->delay_excp_list;
1008     dc->delay_excp_list = e;
1009 
1010     e->lab = gen_new_label();
1011     e->excp = excp;
1012     e->pc = dc->pc;
1013     /* Caller must have used flush_cond before branch. */
1014     assert(e->npc != JUMP_PC);
1015     e->npc = dc->npc;
1016 
1017     return e->lab;
1018 }
1019 
/* Convenience wrapper for delay_exceptionv with a constant exception. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
1024 
/* Branch to a deferred TT_UNALIGNED raise if any bit of MASK is set in
   ADDR. */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    /* flush_cond is required before the deferred-exception branch. */
    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}
1036 
/* Advance pc to npc (for delayed control transfer), resolving a pending
   JUMP_PC or propagating a dynamic npc as needed. */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
    }
}
1058 
/* Step the emulated pc/npc pair to the next sequential insn. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1064 
1065 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1066                         DisasContext *dc)
1067 {
1068     static int subcc_cond[16] = {
1069         TCG_COND_NEVER,
1070         TCG_COND_EQ,
1071         TCG_COND_LE,
1072         TCG_COND_LT,
1073         TCG_COND_LEU,
1074         TCG_COND_LTU,
1075         -1, /* neg */
1076         -1, /* overflow */
1077         TCG_COND_ALWAYS,
1078         TCG_COND_NE,
1079         TCG_COND_GT,
1080         TCG_COND_GE,
1081         TCG_COND_GTU,
1082         TCG_COND_GEU,
1083         -1, /* pos */
1084         -1, /* no overflow */
1085     };
1086 
1087     static int logic_cond[16] = {
1088         TCG_COND_NEVER,
1089         TCG_COND_EQ,     /* eq:  Z */
1090         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1091         TCG_COND_LT,     /* lt:  N ^ V -> N */
1092         TCG_COND_EQ,     /* leu: C | Z -> Z */
1093         TCG_COND_NEVER,  /* ltu: C -> 0 */
1094         TCG_COND_LT,     /* neg: N */
1095         TCG_COND_NEVER,  /* vs:  V -> 0 */
1096         TCG_COND_ALWAYS,
1097         TCG_COND_NE,     /* ne:  !Z */
1098         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1099         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1100         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1101         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1102         TCG_COND_GE,     /* pos: !N */
1103         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1104     };
1105 
1106     TCGv_i32 r_src;
1107     TCGv r_dst;
1108 
1109 #ifdef TARGET_SPARC64
1110     if (xcc) {
1111         r_src = cpu_xcc;
1112     } else {
1113         r_src = cpu_psr;
1114     }
1115 #else
1116     r_src = cpu_psr;
1117 #endif
1118 
1119     switch (dc->cc_op) {
1120     case CC_OP_LOGIC:
1121         cmp->cond = logic_cond[cond];
1122     do_compare_dst_0:
1123         cmp->is_bool = false;
1124         cmp->c2 = tcg_constant_tl(0);
1125 #ifdef TARGET_SPARC64
1126         if (!xcc) {
1127             cmp->c1 = tcg_temp_new();
1128             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1129             break;
1130         }
1131 #endif
1132         cmp->c1 = cpu_cc_dst;
1133         break;
1134 
1135     case CC_OP_SUB:
1136         switch (cond) {
1137         case 6:  /* neg */
1138         case 14: /* pos */
1139             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1140             goto do_compare_dst_0;
1141 
1142         case 7: /* overflow */
1143         case 15: /* !overflow */
1144             goto do_dynamic;
1145 
1146         default:
1147             cmp->cond = subcc_cond[cond];
1148             cmp->is_bool = false;
1149 #ifdef TARGET_SPARC64
1150             if (!xcc) {
1151                 /* Note that sign-extension works for unsigned compares as
1152                    long as both operands are sign-extended.  */
1153                 cmp->c1 = tcg_temp_new();
1154                 cmp->c2 = tcg_temp_new();
1155                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1156                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1157                 break;
1158             }
1159 #endif
1160             cmp->c1 = cpu_cc_src;
1161             cmp->c2 = cpu_cc_src2;
1162             break;
1163         }
1164         break;
1165 
1166     default:
1167     do_dynamic:
1168         gen_helper_compute_psr(tcg_env);
1169         dc->cc_op = CC_OP_FLAGS;
1170         /* FALLTHRU */
1171 
1172     case CC_OP_FLAGS:
1173         /* We're going to generate a boolean result.  */
1174         cmp->cond = TCG_COND_NE;
1175         cmp->is_bool = true;
1176         cmp->c1 = r_dst = tcg_temp_new();
1177         cmp->c2 = tcg_constant_tl(0);
1178 
1179         switch (cond) {
1180         case 0x0:
1181             gen_op_eval_bn(r_dst);
1182             break;
1183         case 0x1:
1184             gen_op_eval_be(r_dst, r_src);
1185             break;
1186         case 0x2:
1187             gen_op_eval_ble(r_dst, r_src);
1188             break;
1189         case 0x3:
1190             gen_op_eval_bl(r_dst, r_src);
1191             break;
1192         case 0x4:
1193             gen_op_eval_bleu(r_dst, r_src);
1194             break;
1195         case 0x5:
1196             gen_op_eval_bcs(r_dst, r_src);
1197             break;
1198         case 0x6:
1199             gen_op_eval_bneg(r_dst, r_src);
1200             break;
1201         case 0x7:
1202             gen_op_eval_bvs(r_dst, r_src);
1203             break;
1204         case 0x8:
1205             gen_op_eval_ba(r_dst);
1206             break;
1207         case 0x9:
1208             gen_op_eval_bne(r_dst, r_src);
1209             break;
1210         case 0xa:
1211             gen_op_eval_bg(r_dst, r_src);
1212             break;
1213         case 0xb:
1214             gen_op_eval_bge(r_dst, r_src);
1215             break;
1216         case 0xc:
1217             gen_op_eval_bgu(r_dst, r_src);
1218             break;
1219         case 0xd:
1220             gen_op_eval_bcc(r_dst, r_src);
1221             break;
1222         case 0xe:
1223             gen_op_eval_bpos(r_dst, r_src);
1224             break;
1225         case 0xf:
1226             gen_op_eval_bvc(r_dst, r_src);
1227             break;
1228         }
1229         break;
1230     }
1231 }
1232 
/*
 * Fill in CMP to evaluate FP branch condition COND against fcc field
 * CC.  Always produces a boolean (cmp->c1 != 0) computed from %fsr.
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Bit offset of the selected fcc field within %fsr.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1311 
1312 static void gen_cond(TCGv r_dst, unsigned int cc, unsigned int cond,
1313                      DisasContext *dc)
1314 {
1315     DisasCompare cmp;
1316     gen_compare(&cmp, cc, cond, dc);
1317 
1318     /* The interface is to return a boolean in r_dst.  */
1319     if (cmp.is_bool) {
1320         tcg_gen_mov_tl(r_dst, cmp.c1);
1321     } else {
1322         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1323     }
1324 }
1325 
1326 static void gen_fcond(TCGv r_dst, unsigned int cc, unsigned int cond)
1327 {
1328     DisasCompare cmp;
1329     gen_fcompare(&cmp, cc, cond);
1330 
1331     /* The interface is to return a boolean in r_dst.  */
1332     if (cmp.is_bool) {
1333         tcg_gen_mov_tl(r_dst, cmp.c1);
1334     } else {
1335         tcg_gen_setcond_tl(cmp.cond, r_dst, cmp.c1, cmp.c2);
1336     }
1337 }
1338 
/*
 * Conditions for the register-contents compares, indexed by the 3-bit
 * cond field.  Stored inverted: gen_compare_reg applies
 * tcg_invert_cond to recover the actual condition.
 */
static const TCGCond gen_tcg_cond_reg[8] = {
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1350 
1351 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1352 {
1353     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1354     cmp->is_bool = false;
1355     cmp->c1 = r_src;
1356     cmp->c2 = tcg_constant_tl(0);
1357 }
1358 
/*
 * Translate a floating-point conditional branch.  OFFSET is the
 * displacement from dc->pc, CC selects the fcc field, and bit 29 of
 * INSN is the annul bit (a): when set, the delay slot is skipped for
 * untaken branches.
 */
static void do_fbranch(DisasContext *dc, int32_t offset, uint32_t insn, int cc)
{
    unsigned int cond = GET_FIELD(insn, 3, 6), a = (insn & (1 << 29));
    target_ulong target = dc->pc + offset;

    if (unlikely(AM_CHECK(dc))) {
        /* 32-bit address masking.  */
        target &= 0xffffffffULL;
    }
    if (cond == 0x0) {
        /* unconditional not taken */
        if (a) {
            /* Annulled: skip the delay slot entirely.  */
            dc->pc = dc->npc + 4;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = dc->pc + 4;
        }
    } else if (cond == 0x8) {
        /* unconditional taken */
        if (a) {
            /* Annulled: branch immediately, skipping the delay slot.  */
            dc->pc = target;
            dc->npc = dc->pc + 4;
        } else {
            dc->pc = dc->npc;
            dc->npc = target;
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
        }
    } else {
        /* Conditional: evaluate into cpu_cond and emit the branch.  */
        flush_cond(dc);
        gen_fcond(cpu_cond, cc, cond);
        if (a) {
            gen_branch_a(dc, target);
        } else {
            gen_branch_n(dc, target);
        }
    }
}
1396 
1397 #ifdef TARGET_SPARC64
/* Single-precision FP compare: dispatch to the helper for fcc FCCNO.  */
static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}
1415 
/* Double-precision FP compare: dispatch to the helper for fcc FCCNO.  */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}
1433 
/* Quad-precision FP compare (operands staged in QT0/QT1 by the caller):
   dispatch to the helper for fcc FCCNO.  */
static void gen_op_fcmpq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpq(cpu_fsr, tcg_env);
        break;
    case 1:
        gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
        break;
    case 2:
        gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
        break;
    case 3:
        gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
        break;
    }
}
1451 
/* Single-precision FP compare-and-signal (fcmpes) for fcc FCCNO.  */
static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}
1469 
/* Double-precision FP compare-and-signal (fcmped) for fcc FCCNO.  */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 1:
        gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 2:
        gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    case 3:
        gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
        break;
    }
}
1487 
/* Quad-precision FP compare-and-signal (operands staged in QT0/QT1)
   for fcc FCCNO.  */
static void gen_op_fcmpeq(int fccno)
{
    switch (fccno) {
    case 0:
        gen_helper_fcmpeq(cpu_fsr, tcg_env);
        break;
    case 1:
        gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
        break;
    case 2:
        gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
        break;
    case 3:
        gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
        break;
    }
}
1505 
1506 #else
1507 
/* Pre-v9 variant: there is a single fcc field, so FCCNO is unused.  */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1512 
/* Pre-v9 variant: FCCNO is unused.  */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1517 
/* Pre-v9 variant: FCCNO is unused; operands staged in QT0/QT1.  */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}
1522 
/* Pre-v9 variant: FCCNO is unused.  */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1527 
/* Pre-v9 variant: FCCNO is unused.  */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1532 
/* Pre-v9 variant: FCCNO is unused; operands staged in QT0/QT1.  */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1537 #endif
1538 
/* Set the FTT field of %fsr to FSR_FLAGS and raise a TT_FP_EXCP trap.  */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Clear the old FTT bits before OR-ing in the new ones.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1545 
/*
 * If the FPU is disabled, raise TT_NFPU_INSN and return 1 so the
 * caller can abandon the instruction.  Returns 0 when the FPU may be
 * used; in user-only builds the check is compiled out.
 */
static int gen_trap_ifnofpu(DisasContext *dc)
{
#if !defined(CONFIG_USER_ONLY)
    if (!dc->fpu_enabled) {
        gen_exception(dc, TT_NFPU_INSN);
        return 1;
    }
#endif
    return 0;
}
1556 
/* Clear the FTT and current IEEE exception (cexc) fields of %fsr.  */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1561 
1562 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1563                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1564 {
1565     TCGv_i32 dst, src;
1566 
1567     src = gen_load_fpr_F(dc, rs);
1568     dst = gen_dest_fpr_F(dc);
1569 
1570     gen(dst, tcg_env, src);
1571     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1572 
1573     gen_store_fpr_F(dc, rd, dst);
1574 }
1575 
1576 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1577                           void (*gen)(TCGv_i32, TCGv_i32))
1578 {
1579     TCGv_i32 dst, src;
1580 
1581     src = gen_load_fpr_F(dc, rs);
1582     dst = gen_dest_fpr_F(dc);
1583 
1584     gen(dst, src);
1585 
1586     gen_store_fpr_F(dc, rd, dst);
1587 }
1588 
1589 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1590                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1591 {
1592     TCGv_i32 dst, src1, src2;
1593 
1594     src1 = gen_load_fpr_F(dc, rs1);
1595     src2 = gen_load_fpr_F(dc, rs2);
1596     dst = gen_dest_fpr_F(dc);
1597 
1598     gen(dst, tcg_env, src1, src2);
1599     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1600 
1601     gen_store_fpr_F(dc, rd, dst);
1602 }
1603 
1604 #ifdef TARGET_SPARC64
1605 static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1606                            void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
1607 {
1608     TCGv_i32 dst, src1, src2;
1609 
1610     src1 = gen_load_fpr_F(dc, rs1);
1611     src2 = gen_load_fpr_F(dc, rs2);
1612     dst = gen_dest_fpr_F(dc);
1613 
1614     gen(dst, src1, src2);
1615 
1616     gen_store_fpr_F(dc, rd, dst);
1617 }
1618 #endif
1619 
1620 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1621                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1622 {
1623     TCGv_i64 dst, src;
1624 
1625     src = gen_load_fpr_D(dc, rs);
1626     dst = gen_dest_fpr_D(dc, rd);
1627 
1628     gen(dst, tcg_env, src);
1629     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1630 
1631     gen_store_fpr_D(dc, rd, dst);
1632 }
1633 
1634 #ifdef TARGET_SPARC64
1635 static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
1636                           void (*gen)(TCGv_i64, TCGv_i64))
1637 {
1638     TCGv_i64 dst, src;
1639 
1640     src = gen_load_fpr_D(dc, rs);
1641     dst = gen_dest_fpr_D(dc, rd);
1642 
1643     gen(dst, src);
1644 
1645     gen_store_fpr_D(dc, rd, dst);
1646 }
1647 #endif
1648 
1649 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1650                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1651 {
1652     TCGv_i64 dst, src1, src2;
1653 
1654     src1 = gen_load_fpr_D(dc, rs1);
1655     src2 = gen_load_fpr_D(dc, rs2);
1656     dst = gen_dest_fpr_D(dc, rd);
1657 
1658     gen(dst, tcg_env, src1, src2);
1659     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1660 
1661     gen_store_fpr_D(dc, rd, dst);
1662 }
1663 
1664 #ifdef TARGET_SPARC64
1665 static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1666                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
1667 {
1668     TCGv_i64 dst, src1, src2;
1669 
1670     src1 = gen_load_fpr_D(dc, rs1);
1671     src2 = gen_load_fpr_D(dc, rs2);
1672     dst = gen_dest_fpr_D(dc, rd);
1673 
1674     gen(dst, src1, src2);
1675 
1676     gen_store_fpr_D(dc, rd, dst);
1677 }
1678 
1679 static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1680                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1681 {
1682     TCGv_i64 dst, src1, src2;
1683 
1684     src1 = gen_load_fpr_D(dc, rs1);
1685     src2 = gen_load_fpr_D(dc, rs2);
1686     dst = gen_dest_fpr_D(dc, rd);
1687 
1688     gen(dst, cpu_gsr, src1, src2);
1689 
1690     gen_store_fpr_D(dc, rd, dst);
1691 }
1692 
1693 static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
1694                             void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
1695 {
1696     TCGv_i64 dst, src0, src1, src2;
1697 
1698     src1 = gen_load_fpr_D(dc, rs1);
1699     src2 = gen_load_fpr_D(dc, rs2);
1700     src0 = gen_load_fpr_D(dc, rd);
1701     dst = gen_dest_fpr_D(dc, rd);
1702 
1703     gen(dst, src0, src1, src2);
1704 
1705     gen_store_fpr_D(dc, rd, dst);
1706 }
1707 #endif
1708 
/* Quad-precision unary FP op: operand staged in QT1, result taken from
   QT0, with IEEE exception checking.  */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1720 
1721 #ifdef TARGET_SPARC64
/* Quad-precision unary op (QT1 in, QT0 out) that cannot raise IEEE
   exceptions.  */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1732 #endif
1733 
/* Quad-precision binary FP op: operands staged in QT0/QT1, result in
   QT0, with IEEE exception checking.  */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1746 
1747 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1748                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1749 {
1750     TCGv_i64 dst;
1751     TCGv_i32 src1, src2;
1752 
1753     src1 = gen_load_fpr_F(dc, rs1);
1754     src2 = gen_load_fpr_F(dc, rs2);
1755     dst = gen_dest_fpr_D(dc, rd);
1756 
1757     gen(dst, tcg_env, src1, src2);
1758     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1759 
1760     gen_store_fpr_D(dc, rd, dst);
1761 }
1762 
/* Two double-precision sources, quad result taken from QT0, with IEEE
   exception checking.  */
static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1777 
1778 #ifdef TARGET_SPARC64
1779 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1780                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1781 {
1782     TCGv_i64 dst;
1783     TCGv_i32 src;
1784 
1785     src = gen_load_fpr_F(dc, rs);
1786     dst = gen_dest_fpr_D(dc, rd);
1787 
1788     gen(dst, tcg_env, src);
1789     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1790 
1791     gen_store_fpr_D(dc, rd, dst);
1792 }
1793 #endif
1794 
1795 static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
1796                           void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1797 {
1798     TCGv_i64 dst;
1799     TCGv_i32 src;
1800 
1801     src = gen_load_fpr_F(dc, rs);
1802     dst = gen_dest_fpr_D(dc, rd);
1803 
1804     gen(dst, tcg_env, src);
1805 
1806     gen_store_fpr_D(dc, rd, dst);
1807 }
1808 
1809 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1810                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1811 {
1812     TCGv_i32 dst;
1813     TCGv_i64 src;
1814 
1815     src = gen_load_fpr_D(dc, rs);
1816     dst = gen_dest_fpr_F(dc);
1817 
1818     gen(dst, tcg_env, src);
1819     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1820 
1821     gen_store_fpr_F(dc, rd, dst);
1822 }
1823 
/* Quad source staged in QT1, single-precision result, with IEEE
   exception checking.  */
static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1837 
/* Quad source staged in QT1, double-precision result, with IEEE
   exception checking.  */
static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1851 
/* Single-precision source, quad result taken from QT0; no IEEE
   exception check after the op.  */
static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1864 
/* Double-precision source, quad result taken from QT0; no IEEE
   exception check after the op.  */
static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1877 
/* SWAP(A): atomically exchange SRC with the value at ADDR; the old
   memory value is returned in DST.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1884 
1885 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1886 {
1887     TCGv m1 = tcg_constant_tl(0xff);
1888     gen_address_mask(dc, addr);
1889     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1890 }
1891 
1892 /* asi moves */
1893 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* How a decoded ASI access will be implemented.  */
typedef enum {
    GET_ASI_HELPER,   /* fall back to the generic ld/st_asi helpers */
    GET_ASI_EXCP,     /* an exception was raised; generate nothing */
    GET_ASI_DIRECT,   /* plain qemu_ld/st with the resolved mem_idx */
    GET_ASI_DTWINX,   /* twin / quad doubleword access */
    GET_ASI_BLOCK,    /* block load/store ASIs */
    GET_ASI_SHORT,    /* 8/16-bit FP load/store ASIs */
    GET_ASI_BCOPY,    /* ASI_M_BCOPY: block copy via sta */
    GET_ASI_BFILL,    /* ASI_M_BFILL: block fill via stda */
} ASIType;
1904 
/* Result of decoding an ASI access; filled in by get_asi().  */
typedef struct {
    ASIType type;   /* implementation strategy */
    int asi;        /* resolved ASI number */
    int mem_idx;    /* mmu index for direct accesses */
    MemOp memop;    /* access size/sign, endian swap applied */
} DisasASI;
1911 
/*
 * Decode the ASI of a load/store-alternate instruction into a
 * DisasASI: the implementation strategy, the resolved ASI number, the
 * mmu index to use, and the (possibly byte-swapped) memop.  For
 * illegal or insufficiently privileged ASI usage an exception is
 * generated and type GET_ASI_EXCP is returned.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        /* The immediate form uses the %asi register, tracked in dc->asi.  */
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First pass: resolve the mmu index for the address space.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second pass: classify how the access will be implemented.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2124 
/*
 * Generate a load-alternate of size/sign MEMOP from ADDR into DST.
 * Direct ASIs use an inline load; anything unhandled falls back to the
 * ld_asi helper.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* Sync architectural state before calling the helper.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; truncate to target size.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2159 
/*
 * Emit code for an integer store-alternate instruction: store SRC to
 * ADDR in the address space named by the instruction's ASI, with
 * access size and endianness MEMOP.
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may raise an exception: sync pc/npc first. */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen from TL. */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2235 
2236 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2237                          TCGv addr, int insn)
2238 {
2239     DisasASI da = get_asi(dc, insn, MO_TEUL);
2240 
2241     switch (da.type) {
2242     case GET_ASI_EXCP:
2243         break;
2244     case GET_ASI_DIRECT:
2245         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2246         break;
2247     default:
2248         /* ??? Should be DAE_invalid_asi.  */
2249         gen_exception(dc, TT_DATA_ACCESS);
2250         break;
2251     }
2252 }
2253 
2254 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2255                         int insn, int rd)
2256 {
2257     DisasASI da = get_asi(dc, insn, MO_TEUL);
2258     TCGv oldv;
2259 
2260     switch (da.type) {
2261     case GET_ASI_EXCP:
2262         return;
2263     case GET_ASI_DIRECT:
2264         oldv = tcg_temp_new();
2265         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2266                                   da.mem_idx, da.memop | MO_ALIGN);
2267         gen_store_gpr(dc, rd, oldv);
2268         break;
2269     default:
2270         /* ??? Should be DAE_invalid_asi.  */
2271         gen_exception(dc, TT_DATA_ACCESS);
2272         break;
2273     }
2274 }
2275 
/*
 * Emit code for the ldstuba instruction: atomically load the byte at
 * ADDR into DST and replace it in memory with 0xff.
 */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The helper pair below is not atomic; force the TB to be
               re-executed exclusively instead. */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            /* Helpers may raise exceptions: sync pc/npc first. */
            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2311 #endif
2312 
2313 #ifdef TARGET_SPARC64
/*
 * Emit code for a floating-point load-alternate instruction
 * (ldfa/lddfa/ldqfa): load SIZE bytes (4, 8 or 16) from ADDR into
 * fp register RD, via the address space named by the instruction's ASI.
 */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Load the first half into a temp so that a fault on the
               second access does not leave cpu_fpr[rd/2] clobbered. */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            /* The helper may raise an exception: sync pc/npc first. */
            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                /* As above, buffer the first half in a temp. */
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2422 
/*
 * Emit code for a floating-point store-alternate instruction
 * (stfa/stdfa/stqfa): store SIZE bytes (4, 8 or 16) from fp register
 * RD to ADDR, via the address space named by the instruction's ASI.
 */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2505 
/*
 * Emit code for the v9 ldda instruction: load a 128-bit (TWINX) or
 * 64-bit quantity from ADDR into the even/odd register pair RD, RD+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        return;

    case GET_ASI_DTWINX:
        /* Two adjacent 64-bit loads; the first checks the 16-byte
           alignment required for TWINX accesses. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            /* The helper may raise an exception: sync pc/npc first. */
            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2567 
/*
 * Emit code for the v9 stda instruction: store the even/odd register
 * pair RD (HI), RD+1 as a 128-bit (TWINX) or 64-bit quantity at ADDR.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        break;

    case GET_ASI_DTWINX:
        /* Two adjacent 64-bit stores; the first checks the 16-byte
           alignment required for TWINX accesses. */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            /* The helper may raise an exception: sync pc/npc first. */
            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2623 
2624 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2625                          int insn, int rd)
2626 {
2627     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2628     TCGv oldv;
2629 
2630     switch (da.type) {
2631     case GET_ASI_EXCP:
2632         return;
2633     case GET_ASI_DIRECT:
2634         oldv = tcg_temp_new();
2635         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2636                                   da.mem_idx, da.memop | MO_ALIGN);
2637         gen_store_gpr(dc, rd, oldv);
2638         break;
2639     default:
2640         /* ??? Should be DAE_invalid_asi.  */
2641         gen_exception(dc, TT_DATA_ACCESS);
2642         break;
2643     }
2644 }
2645 
2646 #elif !defined(CONFIG_USER_ONLY)
/*
 * Emit code for the v8 ldda instruction: load a 64-bit quantity from
 * ADDR into the even/odd 32-bit register pair RD, RD+1.
 */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception: sync pc/npc first. */
            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* Split the 64-bit result into the two 32-bit destinations. */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2680 
/*
 * Emit code for the v8 stda instruction: store the even/odd 32-bit
 * register pair RD (HI), RD+1 as one 64-bit quantity at ADDR.
 */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing more. */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            /* The helper may raise an exception: sync pc/npc first. */
            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2726 #endif
2727 
2728 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2729 {
2730     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2731     return gen_load_gpr(dc, rs1);
2732 }
2733 
2734 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2735 {
2736     if (IS_IMM) { /* immediate */
2737         target_long simm = GET_FIELDs(insn, 19, 31);
2738         TCGv t = tcg_temp_new();
2739         tcg_gen_movi_tl(t, simm);
2740         return t;
2741     } else {      /* register */
2742         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2743         return gen_load_gpr(dc, rs2);
2744     }
2745 }
2746 
2747 #ifdef TARGET_SPARC64
2748 static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2749 {
2750     TCGv_i32 c32, zero, dst, s1, s2;
2751 
2752     /* We have two choices here: extend the 32 bit data and use movcond_i64,
2753        or fold the comparison down to 32 bits and use movcond_i32.  Choose
2754        the later.  */
2755     c32 = tcg_temp_new_i32();
2756     if (cmp->is_bool) {
2757         tcg_gen_extrl_i64_i32(c32, cmp->c1);
2758     } else {
2759         TCGv_i64 c64 = tcg_temp_new_i64();
2760         tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
2761         tcg_gen_extrl_i64_i32(c32, c64);
2762     }
2763 
2764     s1 = gen_load_fpr_F(dc, rs);
2765     s2 = gen_load_fpr_F(dc, rd);
2766     dst = gen_dest_fpr_F(dc);
2767     zero = tcg_constant_i32(0);
2768 
2769     tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);
2770 
2771     gen_store_fpr_F(dc, rd, dst);
2772 }
2773 
2774 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2775 {
2776     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2777     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2778                         gen_load_fpr_D(dc, rs),
2779                         gen_load_fpr_D(dc, rd));
2780     gen_store_fpr_D(dc, rd, dst);
2781 }
2782 
/* Conditional move of a quad-precision fp register: RD = CMP ? RS : RD.
   The quad is handled as two 64-bit halves. */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    /* The destination fp register set may have been written. */
    gen_update_fprs_dirty(dc, qd);
}
2795 
2796 #ifndef CONFIG_USER_ONLY
/* Compute in R_TSPTR a host pointer to the trap_state entry for the
   current trap level: env->ts[env->tl & MAXTL_MASK]. */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2818 #endif
2819 
/*
 * Emit code for the VIS edge8/16/32{l}{cc} instructions: compute the
 * edge byte/halfword/word mask for addresses S1 and S2 into DST.
 * When CC is set, also set the condition codes as for "subcc s1, s2".
 * NOTE: S1 and S2 are clobbered (masked in place) by this function.
 */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* Record a SUB-style comparison of the two addresses. */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    /* Look up the left mask for s1 and the right mask for s2. */
    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(lo1, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the two addresses with the low bits (and, in 32-bit
       address mode, the high bits) stripped. */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
    tcg_gen_and_tl(lo2, lo2, lo1);
    tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
}
2906 
2907 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2908 {
2909     TCGv tmp = tcg_temp_new();
2910 
2911     tcg_gen_add_tl(tmp, s1, s2);
2912     tcg_gen_andi_tl(dst, tmp, -8);
2913     if (left) {
2914         tcg_gen_neg_tl(tmp, tmp);
2915     }
2916     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2917 }
2918 
/* VIS faligndata: concatenate S1:S2 and extract the 64-bit window that
   starts GSR.align bytes into S1. */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = GSR.align * 8, i.e. the bit offset into s1. */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2939 #endif
2940 
2941 /* Include the auto-generated decoder.  */
2942 #include "decode-insns.c.inc"
2943 
/* Define a decodetree trans_* entry point that gates the implementation
   FUNC behind the availability predicate avail_AVAIL. */
#define TRANS(NAME, AVAIL, FUNC, ...) \
    static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
    { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }

/* Availability predicates: all CPUs, 32-bit builds only, 64-bit only. */
#define avail_ALL(C)      true
#ifdef TARGET_SPARC64
# define avail_32(C)      false
# define avail_64(C)      true
#else
# define avail_32(C)      true
# define avail_64(C)      false
#endif
2956 
/* Default case for non jump instructions: advance pc/npc by one insn.
   Returns true, for use as a trans_* tail call. */
static bool advance_pc(DisasContext *dc)
{
    if (dc->npc & 3) {
        /* npc holds one of the special marker values (real npc values
           are always 4-byte aligned). */
        switch (dc->npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            dc->pc = dc->npc;
            gen_op_next_insn();
            break;
        case JUMP_PC:
            /* we can do a static jump */
            gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
            dc->base.is_jmp = DISAS_NORETURN;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Both pc and npc are static: simply step forward. */
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
    return true;
}
2981 
2982 static bool advance_jump_uncond_never(DisasContext *dc, bool annul)
2983 {
2984     if (annul) {
2985         dc->pc = dc->npc + 4;
2986         dc->npc = dc->pc + 4;
2987     } else {
2988         dc->pc = dc->npc;
2989         dc->npc = dc->pc + 4;
2990     }
2991     return true;
2992 }
2993 
2994 static bool advance_jump_uncond_always(DisasContext *dc, bool annul,
2995                                        target_ulong dest)
2996 {
2997     if (annul) {
2998         dc->pc = dest;
2999         dc->npc = dest + 4;
3000     } else {
3001         dc->pc = dc->npc;
3002         dc->npc = dest;
3003         tcg_gen_mov_tl(cpu_pc, cpu_npc);
3004     }
3005     return true;
3006 }
3007 
3008 static bool advance_jump_cond(DisasContext *dc, bool annul, target_ulong dest)
3009 {
3010     if (annul) {
3011         gen_branch_a(dc, dest);
3012     } else {
3013         gen_branch_n(dc, dest);
3014     }
3015     return true;
3016 }
3017 
/* Shared implementation for the Bicc (v8) and BPcc (v9) branches. */
static bool do_bpcc(DisasContext *dc, arg_bcc *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);

    switch (a->cond) {
    case 0x0:
        /* bn -- branch never */
        return advance_jump_uncond_never(dc, a->a);
    case 0x8:
        /* ba -- branch always */
        return advance_jump_uncond_always(dc, a->a, target);
    default:
        /* True conditional branch: evaluate the condition codes. */
        flush_cond(dc);
        gen_cond(cpu_cond, a->cc, a->cond, dc);
        return advance_jump_cond(dc, a->a, target);
    }
}
3033 
/* Bicc is the v8 conditional branch; BPcc the v9 branch with prediction. */
TRANS(Bicc, ALL, do_bpcc, a)
TRANS(BPcc,  64, do_bpcc, a)
3036 
/* V9 BPr: branch on the contents of an integer register. */
static bool trans_BPr(DisasContext *dc, arg_BPr *a)
{
    target_long target = address_mask_i(dc, dc->pc + a->i * 4);
    DisasCompare cmp;

    if (!avail_64(dc)) {
        return false;
    }
    if (gen_tcg_cond_reg[a->cond] == TCG_COND_NEVER) {
        /* Reserved rcond encoding. */
        return false;
    }

    flush_cond(dc);
    gen_compare_reg(&cmp, a->cond, gen_load_gpr(dc, a->rs1));
    tcg_gen_setcond_tl(cmp.cond, cpu_cond, cmp.c1, cmp.c2);
    return advance_jump_cond(dc, a->a, target);
}
3054 
3055 static bool trans_CALL(DisasContext *dc, arg_CALL *a)
3056 {
3057     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
3058 
3059     gen_store_gpr(dc, 15, tcg_constant_tl(dc->pc));
3060     gen_mov_pc_npc(dc);
3061     dc->npc = target;
3062     return true;
3063 }
3064 
/*
 * Bail out of the decoder when the CPU model lacks FEATURE.
 * Both macros jump to labels inside disas_sparc_legacy(), so they are
 * only usable in that function.  Wrapped in do { } while (0) so they
 * behave as a single statement (safe after an unbraced if/else).
 */
#define CHECK_IU_FEATURE(dc, FEATURE)                           \
    do {                                                        \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) { \
            goto illegal_insn;                                  \
        }                                                       \
    } while (0)
#define CHECK_FPU_FEATURE(dc, FEATURE)                          \
    do {                                                        \
        if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE)) { \
            goto nfpu_insn;                                     \
        }                                                       \
    } while (0)
3071 
3072 /* before an instruction, dc->pc must be static */
3073 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3074 {
3075     unsigned int opc, rs1, rs2, rd;
3076     TCGv cpu_src1, cpu_src2;
3077     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3078     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3079     target_long simm;
3080 
3081     opc = GET_FIELD(insn, 0, 1);
3082     rd = GET_FIELD(insn, 2, 6);
3083 
3084     switch (opc) {
3085     case 0:                     /* branches/sethi */
3086         {
3087             unsigned int xop = GET_FIELD(insn, 7, 9);
3088             int32_t target;
3089             switch (xop) {
3090 #ifdef TARGET_SPARC64
3091             case 0x1:           /* V9 BPcc */
3092                 g_assert_not_reached(); /* in decodetree */
3093             case 0x3:           /* V9 BPr */
3094                 g_assert_not_reached(); /* in decodetree */
3095             case 0x5:           /* V9 FBPcc */
3096                 {
3097                     int cc = GET_FIELD_SP(insn, 20, 21);
3098                     if (gen_trap_ifnofpu(dc)) {
3099                         goto jmp_insn;
3100                     }
3101                     target = GET_FIELD_SP(insn, 0, 18);
3102                     target = sign_extend(target, 19);
3103                     target <<= 2;
3104                     do_fbranch(dc, target, insn, cc);
3105                     goto jmp_insn;
3106                 }
3107 #else
3108             case 0x7:           /* CBN+x */
3109                 {
3110                     goto ncp_insn;
3111                 }
3112 #endif
3113             case 0x2:           /* BN+x */
3114                 g_assert_not_reached(); /* in decodetree */
3115             case 0x6:           /* FBN+x */
3116                 {
3117                     if (gen_trap_ifnofpu(dc)) {
3118                         goto jmp_insn;
3119                     }
3120                     target = GET_FIELD(insn, 10, 31);
3121                     target = sign_extend(target, 22);
3122                     target <<= 2;
3123                     do_fbranch(dc, target, insn, 0);
3124                     goto jmp_insn;
3125                 }
3126             case 0x4:           /* SETHI */
3127                 /* Special-case %g0 because that's the canonical nop.  */
3128                 if (rd) {
3129                     uint32_t value = GET_FIELD(insn, 10, 31);
3130                     TCGv t = gen_dest_gpr(dc, rd);
3131                     tcg_gen_movi_tl(t, value << 10);
3132                     gen_store_gpr(dc, rd, t);
3133                 }
3134                 break;
3135             case 0x0:           /* UNIMPL */
3136             default:
3137                 goto illegal_insn;
3138             }
3139             break;
3140         }
3141         break;
3142     case 1:
3143         g_assert_not_reached(); /* in decodetree */
3144     case 2:                     /* FPU & Logical Operations */
3145         {
3146             unsigned int xop = GET_FIELD(insn, 7, 12);
3147             TCGv cpu_dst = tcg_temp_new();
3148             TCGv cpu_tmp0;
3149 
3150             if (xop == 0x3a) {  /* generate trap */
3151                 int cond = GET_FIELD(insn, 3, 6);
3152                 TCGv_i32 trap;
3153                 TCGLabel *l1 = NULL;
3154                 int mask;
3155 
3156                 if (cond == 0) {
3157                     /* Trap never.  */
3158                     break;
3159                 }
3160 
3161                 save_state(dc);
3162 
3163                 if (cond != 8) {
3164                     /* Conditional trap.  */
3165                     DisasCompare cmp;
3166 #ifdef TARGET_SPARC64
3167                     /* V9 icc/xcc */
3168                     int cc = GET_FIELD_SP(insn, 11, 12);
3169                     if (cc == 0) {
3170                         gen_compare(&cmp, 0, cond, dc);
3171                     } else if (cc == 2) {
3172                         gen_compare(&cmp, 1, cond, dc);
3173                     } else {
3174                         goto illegal_insn;
3175                     }
3176 #else
3177                     gen_compare(&cmp, 0, cond, dc);
3178 #endif
3179                     l1 = gen_new_label();
3180                     tcg_gen_brcond_tl(tcg_invert_cond(cmp.cond),
3181                                       cmp.c1, cmp.c2, l1);
3182                 }
3183 
3184                 mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
3185                         ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
3186 
3187                 /* Don't use the normal temporaries, as they may well have
3188                    gone out of scope with the branch above.  While we're
3189                    doing that we might as well pre-truncate to 32-bit.  */
3190                 trap = tcg_temp_new_i32();
3191 
3192                 rs1 = GET_FIELD_SP(insn, 14, 18);
3193                 if (IS_IMM) {
3194                     rs2 = GET_FIELD_SP(insn, 0, 7);
3195                     if (rs1 == 0) {
3196                         tcg_gen_movi_i32(trap, (rs2 & mask) + TT_TRAP);
3197                         /* Signal that the trap value is fully constant.  */
3198                         mask = 0;
3199                     } else {
3200                         TCGv t1 = gen_load_gpr(dc, rs1);
3201                         tcg_gen_trunc_tl_i32(trap, t1);
3202                         tcg_gen_addi_i32(trap, trap, rs2);
3203                     }
3204                 } else {
3205                     TCGv t1, t2;
3206                     rs2 = GET_FIELD_SP(insn, 0, 4);
3207                     t1 = gen_load_gpr(dc, rs1);
3208                     t2 = gen_load_gpr(dc, rs2);
3209                     tcg_gen_add_tl(t1, t1, t2);
3210                     tcg_gen_trunc_tl_i32(trap, t1);
3211                 }
3212                 if (mask != 0) {
3213                     tcg_gen_andi_i32(trap, trap, mask);
3214                     tcg_gen_addi_i32(trap, trap, TT_TRAP);
3215                 }
3216 
3217                 gen_helper_raise_exception(tcg_env, trap);
3218 
3219                 if (cond == 8) {
3220                     /* An unconditional trap ends the TB.  */
3221                     dc->base.is_jmp = DISAS_NORETURN;
3222                     goto jmp_insn;
3223                 } else {
3224                     /* A conditional trap falls through to the next insn.  */
3225                     gen_set_label(l1);
3226                     break;
3227                 }
3228             } else if (xop == 0x28) {
3229                 rs1 = GET_FIELD(insn, 13, 17);
3230                 switch(rs1) {
3231                 case 0: /* rdy */
3232 #ifndef TARGET_SPARC64
3233                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3234                                        manual, rdy on the microSPARC
3235                                        II */
3236                 case 0x0f:          /* stbar in the SPARCv8 manual,
3237                                        rdy on the microSPARC II */
3238                 case 0x10 ... 0x1f: /* implementation-dependent in the
3239                                        SPARCv8 manual, rdy on the
3240                                        microSPARC II */
3241                     /* Read Asr17 */
3242                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3243                         TCGv t = gen_dest_gpr(dc, rd);
3244                         /* Read Asr17 for a Leon3 monoprocessor */
3245                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3246                         gen_store_gpr(dc, rd, t);
3247                         break;
3248                     }
3249 #endif
3250                     gen_store_gpr(dc, rd, cpu_y);
3251                     break;
3252 #ifdef TARGET_SPARC64
3253                 case 0x2: /* V9 rdccr */
3254                     update_psr(dc);
3255                     gen_helper_rdccr(cpu_dst, tcg_env);
3256                     gen_store_gpr(dc, rd, cpu_dst);
3257                     break;
3258                 case 0x3: /* V9 rdasi */
3259                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3260                     gen_store_gpr(dc, rd, cpu_dst);
3261                     break;
3262                 case 0x4: /* V9 rdtick */
3263                     {
3264                         TCGv_ptr r_tickptr;
3265                         TCGv_i32 r_const;
3266 
3267                         r_tickptr = tcg_temp_new_ptr();
3268                         r_const = tcg_constant_i32(dc->mem_idx);
3269                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3270                                        offsetof(CPUSPARCState, tick));
3271                         if (translator_io_start(&dc->base)) {
3272                             dc->base.is_jmp = DISAS_EXIT;
3273                         }
3274                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3275                                                   r_const);
3276                         gen_store_gpr(dc, rd, cpu_dst);
3277                     }
3278                     break;
3279                 case 0x5: /* V9 rdpc */
3280                     {
3281                         TCGv t = gen_dest_gpr(dc, rd);
3282                         if (unlikely(AM_CHECK(dc))) {
3283                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3284                         } else {
3285                             tcg_gen_movi_tl(t, dc->pc);
3286                         }
3287                         gen_store_gpr(dc, rd, t);
3288                     }
3289                     break;
3290                 case 0x6: /* V9 rdfprs */
3291                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3292                     gen_store_gpr(dc, rd, cpu_dst);
3293                     break;
3294                 case 0xf: /* V9 membar */
3295                     break; /* no effect */
3296                 case 0x13: /* Graphics Status */
3297                     if (gen_trap_ifnofpu(dc)) {
3298                         goto jmp_insn;
3299                     }
3300                     gen_store_gpr(dc, rd, cpu_gsr);
3301                     break;
3302                 case 0x16: /* Softint */
3303                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3304                                      offsetof(CPUSPARCState, softint));
3305                     gen_store_gpr(dc, rd, cpu_dst);
3306                     break;
3307                 case 0x17: /* Tick compare */
3308                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3309                     break;
3310                 case 0x18: /* System tick */
3311                     {
3312                         TCGv_ptr r_tickptr;
3313                         TCGv_i32 r_const;
3314 
3315                         r_tickptr = tcg_temp_new_ptr();
3316                         r_const = tcg_constant_i32(dc->mem_idx);
3317                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3318                                        offsetof(CPUSPARCState, stick));
3319                         if (translator_io_start(&dc->base)) {
3320                             dc->base.is_jmp = DISAS_EXIT;
3321                         }
3322                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3323                                                   r_const);
3324                         gen_store_gpr(dc, rd, cpu_dst);
3325                     }
3326                     break;
3327                 case 0x19: /* System tick compare */
3328                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3329                     break;
3330                 case 0x1a: /* UltraSPARC-T1 Strand status */
3331                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3332                      * this ASR as impl. dep
3333                      */
3334                     CHECK_IU_FEATURE(dc, HYPV);
3335                     {
3336                         TCGv t = gen_dest_gpr(dc, rd);
3337                         tcg_gen_movi_tl(t, 1UL);
3338                         gen_store_gpr(dc, rd, t);
3339                     }
3340                     break;
3341                 case 0x10: /* Performance Control */
3342                 case 0x11: /* Performance Instrumentation Counter */
3343                 case 0x12: /* Dispatch Control */
3344                 case 0x14: /* Softint set, WO */
3345                 case 0x15: /* Softint clear, WO */
3346 #endif
3347                 default:
3348                     goto illegal_insn;
3349                 }
3350 #if !defined(CONFIG_USER_ONLY)
3351             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3352 #ifndef TARGET_SPARC64
3353                 if (!supervisor(dc)) {
3354                     goto priv_insn;
3355                 }
3356                 update_psr(dc);
3357                 gen_helper_rdpsr(cpu_dst, tcg_env);
3358 #else
3359                 CHECK_IU_FEATURE(dc, HYPV);
3360                 if (!hypervisor(dc))
3361                     goto priv_insn;
3362                 rs1 = GET_FIELD(insn, 13, 17);
3363                 switch (rs1) {
3364                 case 0: // hpstate
3365                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3366                                    offsetof(CPUSPARCState, hpstate));
3367                     break;
3368                 case 1: // htstate
3369                     // gen_op_rdhtstate();
3370                     break;
3371                 case 3: // hintp
3372                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3373                     break;
3374                 case 5: // htba
3375                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3376                     break;
3377                 case 6: // hver
3378                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3379                     break;
3380                 case 31: // hstick_cmpr
3381                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3382                     break;
3383                 default:
3384                     goto illegal_insn;
3385                 }
3386 #endif
3387                 gen_store_gpr(dc, rd, cpu_dst);
3388                 break;
3389             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3390                 if (!supervisor(dc)) {
3391                     goto priv_insn;
3392                 }
3393                 cpu_tmp0 = tcg_temp_new();
3394 #ifdef TARGET_SPARC64
3395                 rs1 = GET_FIELD(insn, 13, 17);
3396                 switch (rs1) {
3397                 case 0: // tpc
3398                     {
3399                         TCGv_ptr r_tsptr;
3400 
3401                         r_tsptr = tcg_temp_new_ptr();
3402                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3403                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3404                                       offsetof(trap_state, tpc));
3405                     }
3406                     break;
3407                 case 1: // tnpc
3408                     {
3409                         TCGv_ptr r_tsptr;
3410 
3411                         r_tsptr = tcg_temp_new_ptr();
3412                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3413                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3414                                       offsetof(trap_state, tnpc));
3415                     }
3416                     break;
3417                 case 2: // tstate
3418                     {
3419                         TCGv_ptr r_tsptr;
3420 
3421                         r_tsptr = tcg_temp_new_ptr();
3422                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3423                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3424                                       offsetof(trap_state, tstate));
3425                     }
3426                     break;
3427                 case 3: // tt
3428                     {
3429                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3430 
3431                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3432                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3433                                          offsetof(trap_state, tt));
3434                     }
3435                     break;
3436                 case 4: // tick
3437                     {
3438                         TCGv_ptr r_tickptr;
3439                         TCGv_i32 r_const;
3440 
3441                         r_tickptr = tcg_temp_new_ptr();
3442                         r_const = tcg_constant_i32(dc->mem_idx);
3443                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3444                                        offsetof(CPUSPARCState, tick));
3445                         if (translator_io_start(&dc->base)) {
3446                             dc->base.is_jmp = DISAS_EXIT;
3447                         }
3448                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3449                                                   r_tickptr, r_const);
3450                     }
3451                     break;
3452                 case 5: // tba
3453                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3454                     break;
3455                 case 6: // pstate
3456                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3457                                      offsetof(CPUSPARCState, pstate));
3458                     break;
3459                 case 7: // tl
3460                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3461                                      offsetof(CPUSPARCState, tl));
3462                     break;
3463                 case 8: // pil
3464                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3465                                      offsetof(CPUSPARCState, psrpil));
3466                     break;
3467                 case 9: // cwp
3468                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3469                     break;
3470                 case 10: // cansave
3471                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3472                                      offsetof(CPUSPARCState, cansave));
3473                     break;
3474                 case 11: // canrestore
3475                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3476                                      offsetof(CPUSPARCState, canrestore));
3477                     break;
3478                 case 12: // cleanwin
3479                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3480                                      offsetof(CPUSPARCState, cleanwin));
3481                     break;
3482                 case 13: // otherwin
3483                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3484                                      offsetof(CPUSPARCState, otherwin));
3485                     break;
3486                 case 14: // wstate
3487                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3488                                      offsetof(CPUSPARCState, wstate));
3489                     break;
3490                 case 16: // UA2005 gl
3491                     CHECK_IU_FEATURE(dc, GL);
3492                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3493                                      offsetof(CPUSPARCState, gl));
3494                     break;
3495                 case 26: // UA2005 strand status
3496                     CHECK_IU_FEATURE(dc, HYPV);
3497                     if (!hypervisor(dc))
3498                         goto priv_insn;
3499                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3500                     break;
3501                 case 31: // ver
3502                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3503                     break;
3504                 case 15: // fq
3505                 default:
3506                     goto illegal_insn;
3507                 }
3508 #else
3509                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3510 #endif
3511                 gen_store_gpr(dc, rd, cpu_tmp0);
3512                 break;
3513 #endif
3514 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3515             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3516 #ifdef TARGET_SPARC64
3517                 gen_helper_flushw(tcg_env);
3518 #else
3519                 if (!supervisor(dc))
3520                     goto priv_insn;
3521                 gen_store_gpr(dc, rd, cpu_tbr);
3522 #endif
3523                 break;
3524 #endif
3525             } else if (xop == 0x34) {   /* FPU Operations */
3526                 if (gen_trap_ifnofpu(dc)) {
3527                     goto jmp_insn;
3528                 }
3529                 gen_op_clear_ieee_excp_and_FTT();
3530                 rs1 = GET_FIELD(insn, 13, 17);
3531                 rs2 = GET_FIELD(insn, 27, 31);
3532                 xop = GET_FIELD(insn, 18, 26);
3533 
3534                 switch (xop) {
3535                 case 0x1: /* fmovs */
3536                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3537                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3538                     break;
3539                 case 0x5: /* fnegs */
3540                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3541                     break;
3542                 case 0x9: /* fabss */
3543                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3544                     break;
3545                 case 0x29: /* fsqrts */
3546                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3547                     break;
3548                 case 0x2a: /* fsqrtd */
3549                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3550                     break;
3551                 case 0x2b: /* fsqrtq */
3552                     CHECK_FPU_FEATURE(dc, FLOAT128);
3553                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3554                     break;
3555                 case 0x41: /* fadds */
3556                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3557                     break;
3558                 case 0x42: /* faddd */
3559                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3560                     break;
3561                 case 0x43: /* faddq */
3562                     CHECK_FPU_FEATURE(dc, FLOAT128);
3563                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3564                     break;
3565                 case 0x45: /* fsubs */
3566                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3567                     break;
3568                 case 0x46: /* fsubd */
3569                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3570                     break;
3571                 case 0x47: /* fsubq */
3572                     CHECK_FPU_FEATURE(dc, FLOAT128);
3573                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3574                     break;
3575                 case 0x49: /* fmuls */
3576                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3577                     break;
3578                 case 0x4a: /* fmuld */
3579                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3580                     break;
3581                 case 0x4b: /* fmulq */
3582                     CHECK_FPU_FEATURE(dc, FLOAT128);
3583                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3584                     break;
3585                 case 0x4d: /* fdivs */
3586                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3587                     break;
3588                 case 0x4e: /* fdivd */
3589                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3590                     break;
3591                 case 0x4f: /* fdivq */
3592                     CHECK_FPU_FEATURE(dc, FLOAT128);
3593                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3594                     break;
3595                 case 0x69: /* fsmuld */
3596                     CHECK_FPU_FEATURE(dc, FSMULD);
3597                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3598                     break;
3599                 case 0x6e: /* fdmulq */
3600                     CHECK_FPU_FEATURE(dc, FLOAT128);
3601                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3602                     break;
3603                 case 0xc4: /* fitos */
3604                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3605                     break;
3606                 case 0xc6: /* fdtos */
3607                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3608                     break;
3609                 case 0xc7: /* fqtos */
3610                     CHECK_FPU_FEATURE(dc, FLOAT128);
3611                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3612                     break;
3613                 case 0xc8: /* fitod */
3614                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3615                     break;
3616                 case 0xc9: /* fstod */
3617                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3618                     break;
3619                 case 0xcb: /* fqtod */
3620                     CHECK_FPU_FEATURE(dc, FLOAT128);
3621                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3622                     break;
3623                 case 0xcc: /* fitoq */
3624                     CHECK_FPU_FEATURE(dc, FLOAT128);
3625                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3626                     break;
3627                 case 0xcd: /* fstoq */
3628                     CHECK_FPU_FEATURE(dc, FLOAT128);
3629                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3630                     break;
3631                 case 0xce: /* fdtoq */
3632                     CHECK_FPU_FEATURE(dc, FLOAT128);
3633                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3634                     break;
3635                 case 0xd1: /* fstoi */
3636                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3637                     break;
3638                 case 0xd2: /* fdtoi */
3639                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3640                     break;
3641                 case 0xd3: /* fqtoi */
3642                     CHECK_FPU_FEATURE(dc, FLOAT128);
3643                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3644                     break;
3645 #ifdef TARGET_SPARC64
3646                 case 0x2: /* V9 fmovd */
3647                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3648                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3649                     break;
3650                 case 0x3: /* V9 fmovq */
3651                     CHECK_FPU_FEATURE(dc, FLOAT128);
3652                     gen_move_Q(dc, rd, rs2);
3653                     break;
3654                 case 0x6: /* V9 fnegd */
3655                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3656                     break;
3657                 case 0x7: /* V9 fnegq */
3658                     CHECK_FPU_FEATURE(dc, FLOAT128);
3659                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3660                     break;
3661                 case 0xa: /* V9 fabsd */
3662                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3663                     break;
3664                 case 0xb: /* V9 fabsq */
3665                     CHECK_FPU_FEATURE(dc, FLOAT128);
3666                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3667                     break;
3668                 case 0x81: /* V9 fstox */
3669                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3670                     break;
3671                 case 0x82: /* V9 fdtox */
3672                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3673                     break;
3674                 case 0x83: /* V9 fqtox */
3675                     CHECK_FPU_FEATURE(dc, FLOAT128);
3676                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3677                     break;
3678                 case 0x84: /* V9 fxtos */
3679                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3680                     break;
3681                 case 0x88: /* V9 fxtod */
3682                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3683                     break;
3684                 case 0x8c: /* V9 fxtoq */
3685                     CHECK_FPU_FEATURE(dc, FLOAT128);
3686                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3687                     break;
3688 #endif
3689                 default:
3690                     goto illegal_insn;
3691                 }
3692             } else if (xop == 0x35) {   /* FPU Operations */
3693 #ifdef TARGET_SPARC64
3694                 int cond;
3695 #endif
3696                 if (gen_trap_ifnofpu(dc)) {
3697                     goto jmp_insn;
3698                 }
3699                 gen_op_clear_ieee_excp_and_FTT();
3700                 rs1 = GET_FIELD(insn, 13, 17);
3701                 rs2 = GET_FIELD(insn, 27, 31);
3702                 xop = GET_FIELD(insn, 18, 26);
3703 
3704 #ifdef TARGET_SPARC64
3705 #define FMOVR(sz)                                                  \
3706                 do {                                               \
3707                     DisasCompare cmp;                              \
3708                     cond = GET_FIELD_SP(insn, 10, 12);             \
3709                     cpu_src1 = get_src1(dc, insn);                 \
3710                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3711                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3712                 } while (0)
3713 
3714                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3715                     FMOVR(s);
3716                     break;
3717                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3718                     FMOVR(d);
3719                     break;
3720                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3721                     CHECK_FPU_FEATURE(dc, FLOAT128);
3722                     FMOVR(q);
3723                     break;
3724                 }
3725 #undef FMOVR
3726 #endif
3727                 switch (xop) {
3728 #ifdef TARGET_SPARC64
3729 #define FMOVCC(fcc, sz)                                                 \
3730                     do {                                                \
3731                         DisasCompare cmp;                               \
3732                         cond = GET_FIELD_SP(insn, 14, 17);              \
3733                         gen_fcompare(&cmp, fcc, cond);                  \
3734                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3735                     } while (0)
3736 
3737                     case 0x001: /* V9 fmovscc %fcc0 */
3738                         FMOVCC(0, s);
3739                         break;
3740                     case 0x002: /* V9 fmovdcc %fcc0 */
3741                         FMOVCC(0, d);
3742                         break;
3743                     case 0x003: /* V9 fmovqcc %fcc0 */
3744                         CHECK_FPU_FEATURE(dc, FLOAT128);
3745                         FMOVCC(0, q);
3746                         break;
3747                     case 0x041: /* V9 fmovscc %fcc1 */
3748                         FMOVCC(1, s);
3749                         break;
3750                     case 0x042: /* V9 fmovdcc %fcc1 */
3751                         FMOVCC(1, d);
3752                         break;
3753                     case 0x043: /* V9 fmovqcc %fcc1 */
3754                         CHECK_FPU_FEATURE(dc, FLOAT128);
3755                         FMOVCC(1, q);
3756                         break;
3757                     case 0x081: /* V9 fmovscc %fcc2 */
3758                         FMOVCC(2, s);
3759                         break;
3760                     case 0x082: /* V9 fmovdcc %fcc2 */
3761                         FMOVCC(2, d);
3762                         break;
3763                     case 0x083: /* V9 fmovqcc %fcc2 */
3764                         CHECK_FPU_FEATURE(dc, FLOAT128);
3765                         FMOVCC(2, q);
3766                         break;
3767                     case 0x0c1: /* V9 fmovscc %fcc3 */
3768                         FMOVCC(3, s);
3769                         break;
3770                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3771                         FMOVCC(3, d);
3772                         break;
3773                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3774                         CHECK_FPU_FEATURE(dc, FLOAT128);
3775                         FMOVCC(3, q);
3776                         break;
3777 #undef FMOVCC
3778 #define FMOVCC(xcc, sz)                                                 \
3779                     do {                                                \
3780                         DisasCompare cmp;                               \
3781                         cond = GET_FIELD_SP(insn, 14, 17);              \
3782                         gen_compare(&cmp, xcc, cond, dc);               \
3783                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3784                     } while (0)
3785 
3786                     case 0x101: /* V9 fmovscc %icc */
3787                         FMOVCC(0, s);
3788                         break;
3789                     case 0x102: /* V9 fmovdcc %icc */
3790                         FMOVCC(0, d);
3791                         break;
3792                     case 0x103: /* V9 fmovqcc %icc */
3793                         CHECK_FPU_FEATURE(dc, FLOAT128);
3794                         FMOVCC(0, q);
3795                         break;
3796                     case 0x181: /* V9 fmovscc %xcc */
3797                         FMOVCC(1, s);
3798                         break;
3799                     case 0x182: /* V9 fmovdcc %xcc */
3800                         FMOVCC(1, d);
3801                         break;
3802                     case 0x183: /* V9 fmovqcc %xcc */
3803                         CHECK_FPU_FEATURE(dc, FLOAT128);
3804                         FMOVCC(1, q);
3805                         break;
3806 #undef FMOVCC
3807 #endif
3808                     case 0x51: /* fcmps, V9 %fcc */
3809                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3810                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3811                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3812                         break;
3813                     case 0x52: /* fcmpd, V9 %fcc */
3814                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3815                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3816                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3817                         break;
3818                     case 0x53: /* fcmpq, V9 %fcc */
3819                         CHECK_FPU_FEATURE(dc, FLOAT128);
3820                         gen_op_load_fpr_QT0(QFPREG(rs1));
3821                         gen_op_load_fpr_QT1(QFPREG(rs2));
3822                         gen_op_fcmpq(rd & 3);
3823                         break;
3824                     case 0x55: /* fcmpes, V9 %fcc */
3825                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3826                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3827                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3828                         break;
3829                     case 0x56: /* fcmped, V9 %fcc */
3830                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3831                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3832                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3833                         break;
3834                     case 0x57: /* fcmpeq, V9 %fcc */
3835                         CHECK_FPU_FEATURE(dc, FLOAT128);
3836                         gen_op_load_fpr_QT0(QFPREG(rs1));
3837                         gen_op_load_fpr_QT1(QFPREG(rs2));
3838                         gen_op_fcmpeq(rd & 3);
3839                         break;
3840                     default:
3841                         goto illegal_insn;
3842                 }
3843             } else if (xop == 0x2) {
3844                 TCGv dst = gen_dest_gpr(dc, rd);
3845                 rs1 = GET_FIELD(insn, 13, 17);
3846                 if (rs1 == 0) {
3847                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3848                     if (IS_IMM) {       /* immediate */
3849                         simm = GET_FIELDs(insn, 19, 31);
3850                         tcg_gen_movi_tl(dst, simm);
3851                         gen_store_gpr(dc, rd, dst);
3852                     } else {            /* register */
3853                         rs2 = GET_FIELD(insn, 27, 31);
3854                         if (rs2 == 0) {
3855                             tcg_gen_movi_tl(dst, 0);
3856                             gen_store_gpr(dc, rd, dst);
3857                         } else {
3858                             cpu_src2 = gen_load_gpr(dc, rs2);
3859                             gen_store_gpr(dc, rd, cpu_src2);
3860                         }
3861                     }
3862                 } else {
3863                     cpu_src1 = get_src1(dc, insn);
3864                     if (IS_IMM) {       /* immediate */
3865                         simm = GET_FIELDs(insn, 19, 31);
3866                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3867                         gen_store_gpr(dc, rd, dst);
3868                     } else {            /* register */
3869                         rs2 = GET_FIELD(insn, 27, 31);
3870                         if (rs2 == 0) {
3871                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3872                             gen_store_gpr(dc, rd, cpu_src1);
3873                         } else {
3874                             cpu_src2 = gen_load_gpr(dc, rs2);
3875                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3876                             gen_store_gpr(dc, rd, dst);
3877                         }
3878                     }
3879                 }
3880 #ifdef TARGET_SPARC64
3881             } else if (xop == 0x25) { /* sll, V9 sllx */
3882                 cpu_src1 = get_src1(dc, insn);
3883                 if (IS_IMM) {   /* immediate */
3884                     simm = GET_FIELDs(insn, 20, 31);
3885                     if (insn & (1 << 12)) {
3886                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3887                     } else {
3888                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3889                     }
3890                 } else {                /* register */
3891                     rs2 = GET_FIELD(insn, 27, 31);
3892                     cpu_src2 = gen_load_gpr(dc, rs2);
3893                     cpu_tmp0 = tcg_temp_new();
3894                     if (insn & (1 << 12)) {
3895                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3896                     } else {
3897                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3898                     }
3899                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3900                 }
3901                 gen_store_gpr(dc, rd, cpu_dst);
3902             } else if (xop == 0x26) { /* srl, V9 srlx */
3903                 cpu_src1 = get_src1(dc, insn);
3904                 if (IS_IMM) {   /* immediate */
3905                     simm = GET_FIELDs(insn, 20, 31);
3906                     if (insn & (1 << 12)) {
3907                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3908                     } else {
3909                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3910                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3911                     }
3912                 } else {                /* register */
3913                     rs2 = GET_FIELD(insn, 27, 31);
3914                     cpu_src2 = gen_load_gpr(dc, rs2);
3915                     cpu_tmp0 = tcg_temp_new();
3916                     if (insn & (1 << 12)) {
3917                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3918                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3919                     } else {
3920                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3921                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3922                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3923                     }
3924                 }
3925                 gen_store_gpr(dc, rd, cpu_dst);
3926             } else if (xop == 0x27) { /* sra, V9 srax */
3927                 cpu_src1 = get_src1(dc, insn);
3928                 if (IS_IMM) {   /* immediate */
3929                     simm = GET_FIELDs(insn, 20, 31);
3930                     if (insn & (1 << 12)) {
3931                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3932                     } else {
3933                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3934                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3935                     }
3936                 } else {                /* register */
3937                     rs2 = GET_FIELD(insn, 27, 31);
3938                     cpu_src2 = gen_load_gpr(dc, rs2);
3939                     cpu_tmp0 = tcg_temp_new();
3940                     if (insn & (1 << 12)) {
3941                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3942                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3943                     } else {
3944                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3945                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3946                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3947                     }
3948                 }
3949                 gen_store_gpr(dc, rd, cpu_dst);
3950 #endif
3951             } else if (xop < 0x36) {
3952                 if (xop < 0x20) {
3953                     cpu_src1 = get_src1(dc, insn);
3954                     cpu_src2 = get_src2(dc, insn);
3955                     switch (xop & ~0x10) {
3956                     case 0x0: /* add */
3957                         if (xop & 0x10) {
3958                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3959                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3960                             dc->cc_op = CC_OP_ADD;
3961                         } else {
3962                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3963                         }
3964                         break;
3965                     case 0x1: /* and */
3966                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3967                         if (xop & 0x10) {
3968                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3969                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3970                             dc->cc_op = CC_OP_LOGIC;
3971                         }
3972                         break;
3973                     case 0x2: /* or */
3974                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3975                         if (xop & 0x10) {
3976                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3977                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3978                             dc->cc_op = CC_OP_LOGIC;
3979                         }
3980                         break;
3981                     case 0x3: /* xor */
3982                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3983                         if (xop & 0x10) {
3984                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3985                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3986                             dc->cc_op = CC_OP_LOGIC;
3987                         }
3988                         break;
3989                     case 0x4: /* sub */
3990                         if (xop & 0x10) {
3991                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3992                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3993                             dc->cc_op = CC_OP_SUB;
3994                         } else {
3995                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3996                         }
3997                         break;
3998                     case 0x5: /* andn */
3999                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
4000                         if (xop & 0x10) {
4001                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4002                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4003                             dc->cc_op = CC_OP_LOGIC;
4004                         }
4005                         break;
4006                     case 0x6: /* orn */
4007                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
4008                         if (xop & 0x10) {
4009                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4010                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4011                             dc->cc_op = CC_OP_LOGIC;
4012                         }
4013                         break;
4014                     case 0x7: /* xorn */
4015                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
4016                         if (xop & 0x10) {
4017                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4018                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4019                             dc->cc_op = CC_OP_LOGIC;
4020                         }
4021                         break;
4022                     case 0x8: /* addx, V9 addc */
4023                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4024                                         (xop & 0x10));
4025                         break;
4026 #ifdef TARGET_SPARC64
4027                     case 0x9: /* V9 mulx */
4028                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
4029                         break;
4030 #endif
4031                     case 0xa: /* umul */
4032                         CHECK_IU_FEATURE(dc, MUL);
4033                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
4034                         if (xop & 0x10) {
4035                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4036                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4037                             dc->cc_op = CC_OP_LOGIC;
4038                         }
4039                         break;
4040                     case 0xb: /* smul */
4041                         CHECK_IU_FEATURE(dc, MUL);
4042                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
4043                         if (xop & 0x10) {
4044                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
4045                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
4046                             dc->cc_op = CC_OP_LOGIC;
4047                         }
4048                         break;
4049                     case 0xc: /* subx, V9 subc */
4050                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
4051                                         (xop & 0x10));
4052                         break;
4053 #ifdef TARGET_SPARC64
4054                     case 0xd: /* V9 udivx */
4055                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
4056                         break;
4057 #endif
4058                     case 0xe: /* udiv */
4059                         CHECK_IU_FEATURE(dc, DIV);
4060                         if (xop & 0x10) {
4061                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
4062                                                cpu_src2);
4063                             dc->cc_op = CC_OP_DIV;
4064                         } else {
4065                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
4066                                             cpu_src2);
4067                         }
4068                         break;
4069                     case 0xf: /* sdiv */
4070                         CHECK_IU_FEATURE(dc, DIV);
4071                         if (xop & 0x10) {
4072                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
4073                                                cpu_src2);
4074                             dc->cc_op = CC_OP_DIV;
4075                         } else {
4076                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
4077                                             cpu_src2);
4078                         }
4079                         break;
4080                     default:
4081                         goto illegal_insn;
4082                     }
4083                     gen_store_gpr(dc, rd, cpu_dst);
4084                 } else {
4085                     cpu_src1 = get_src1(dc, insn);
4086                     cpu_src2 = get_src2(dc, insn);
4087                     switch (xop) {
4088                     case 0x20: /* taddcc */
4089                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4090                         gen_store_gpr(dc, rd, cpu_dst);
4091                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4092                         dc->cc_op = CC_OP_TADD;
4093                         break;
4094                     case 0x21: /* tsubcc */
4095                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4096                         gen_store_gpr(dc, rd, cpu_dst);
4097                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4098                         dc->cc_op = CC_OP_TSUB;
4099                         break;
4100                     case 0x22: /* taddcctv */
4101                         gen_helper_taddcctv(cpu_dst, tcg_env,
4102                                             cpu_src1, cpu_src2);
4103                         gen_store_gpr(dc, rd, cpu_dst);
4104                         dc->cc_op = CC_OP_TADDTV;
4105                         break;
4106                     case 0x23: /* tsubcctv */
4107                         gen_helper_tsubcctv(cpu_dst, tcg_env,
4108                                             cpu_src1, cpu_src2);
4109                         gen_store_gpr(dc, rd, cpu_dst);
4110                         dc->cc_op = CC_OP_TSUBTV;
4111                         break;
4112                     case 0x24: /* mulscc */
4113                         update_psr(dc);
4114                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4115                         gen_store_gpr(dc, rd, cpu_dst);
4116                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4117                         dc->cc_op = CC_OP_ADD;
4118                         break;
4119 #ifndef TARGET_SPARC64
4120                     case 0x25:  /* sll */
4121                         if (IS_IMM) { /* immediate */
4122                             simm = GET_FIELDs(insn, 20, 31);
4123                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4124                         } else { /* register */
4125                             cpu_tmp0 = tcg_temp_new();
4126                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4127                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4128                         }
4129                         gen_store_gpr(dc, rd, cpu_dst);
4130                         break;
4131                     case 0x26:  /* srl */
4132                         if (IS_IMM) { /* immediate */
4133                             simm = GET_FIELDs(insn, 20, 31);
4134                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4135                         } else { /* register */
4136                             cpu_tmp0 = tcg_temp_new();
4137                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4138                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4139                         }
4140                         gen_store_gpr(dc, rd, cpu_dst);
4141                         break;
4142                     case 0x27:  /* sra */
4143                         if (IS_IMM) { /* immediate */
4144                             simm = GET_FIELDs(insn, 20, 31);
4145                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4146                         } else { /* register */
4147                             cpu_tmp0 = tcg_temp_new();
4148                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4149                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4150                         }
4151                         gen_store_gpr(dc, rd, cpu_dst);
4152                         break;
4153 #endif
4154                     case 0x30:
4155                         {
4156                             cpu_tmp0 = tcg_temp_new();
4157                             switch(rd) {
4158                             case 0: /* wry */
4159                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4160                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4161                                 break;
4162 #ifndef TARGET_SPARC64
4163                             case 0x01 ... 0x0f: /* undefined in the
4164                                                    SPARCv8 manual, nop
4165                                                    on the microSPARC
4166                                                    II */
4167                             case 0x10 ... 0x1f: /* implementation-dependent
4168                                                    in the SPARCv8
4169                                                    manual, nop on the
4170                                                    microSPARC II */
4171                                 if ((rd == 0x13) && (dc->def->features &
4172                                                      CPU_FEATURE_POWERDOWN)) {
4173                                     /* LEON3 power-down */
4174                                     save_state(dc);
4175                                     gen_helper_power_down(tcg_env);
4176                                 }
4177                                 break;
4178 #else
4179                             case 0x2: /* V9 wrccr */
4180                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4181                                 gen_helper_wrccr(tcg_env, cpu_tmp0);
4182                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4183                                 dc->cc_op = CC_OP_FLAGS;
4184                                 break;
4185                             case 0x3: /* V9 wrasi */
4186                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4187                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4188                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4189                                                 offsetof(CPUSPARCState, asi));
4190                                 /*
4191                                  * End TB to notice changed ASI.
4192                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4193                                  * update DisasContext and not exit the TB.
4194                                  */
4195                                 save_state(dc);
4196                                 gen_op_next_insn();
4197                                 tcg_gen_lookup_and_goto_ptr();
4198                                 dc->base.is_jmp = DISAS_NORETURN;
4199                                 break;
4200                             case 0x6: /* V9 wrfprs */
4201                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4202                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4203                                 dc->fprs_dirty = 0;
4204                                 save_state(dc);
4205                                 gen_op_next_insn();
4206                                 tcg_gen_exit_tb(NULL, 0);
4207                                 dc->base.is_jmp = DISAS_NORETURN;
4208                                 break;
4209                             case 0xf: /* V9 sir, nop if user */
4210 #if !defined(CONFIG_USER_ONLY)
4211                                 if (supervisor(dc)) {
4212                                     ; // XXX
4213                                 }
4214 #endif
4215                                 break;
4216                             case 0x13: /* Graphics Status */
4217                                 if (gen_trap_ifnofpu(dc)) {
4218                                     goto jmp_insn;
4219                                 }
4220                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4221                                 break;
4222                             case 0x14: /* Softint set */
4223                                 if (!supervisor(dc))
4224                                     goto illegal_insn;
4225                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4226                                 gen_helper_set_softint(tcg_env, cpu_tmp0);
4227                                 break;
4228                             case 0x15: /* Softint clear */
4229                                 if (!supervisor(dc))
4230                                     goto illegal_insn;
4231                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4232                                 gen_helper_clear_softint(tcg_env, cpu_tmp0);
4233                                 break;
4234                             case 0x16: /* Softint write */
4235                                 if (!supervisor(dc))
4236                                     goto illegal_insn;
4237                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4238                                 gen_helper_write_softint(tcg_env, cpu_tmp0);
4239                                 break;
4240                             case 0x17: /* Tick compare */
4241 #if !defined(CONFIG_USER_ONLY)
4242                                 if (!supervisor(dc))
4243                                     goto illegal_insn;
4244 #endif
4245                                 {
4246                                     TCGv_ptr r_tickptr;
4247 
4248                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4249                                                    cpu_src2);
4250                                     r_tickptr = tcg_temp_new_ptr();
4251                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4252                                                    offsetof(CPUSPARCState, tick));
4253                                     translator_io_start(&dc->base);
4254                                     gen_helper_tick_set_limit(r_tickptr,
4255                                                               cpu_tick_cmpr);
4256                                     /* End TB to handle timer interrupt */
4257                                     dc->base.is_jmp = DISAS_EXIT;
4258                                 }
4259                                 break;
4260                             case 0x18: /* System tick */
4261 #if !defined(CONFIG_USER_ONLY)
4262                                 if (!supervisor(dc))
4263                                     goto illegal_insn;
4264 #endif
4265                                 {
4266                                     TCGv_ptr r_tickptr;
4267 
4268                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4269                                                    cpu_src2);
4270                                     r_tickptr = tcg_temp_new_ptr();
4271                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4272                                                    offsetof(CPUSPARCState, stick));
4273                                     translator_io_start(&dc->base);
4274                                     gen_helper_tick_set_count(r_tickptr,
4275                                                               cpu_tmp0);
4276                                     /* End TB to handle timer interrupt */
4277                                     dc->base.is_jmp = DISAS_EXIT;
4278                                 }
4279                                 break;
4280                             case 0x19: /* System tick compare */
4281 #if !defined(CONFIG_USER_ONLY)
4282                                 if (!supervisor(dc))
4283                                     goto illegal_insn;
4284 #endif
4285                                 {
4286                                     TCGv_ptr r_tickptr;
4287 
4288                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4289                                                    cpu_src2);
4290                                     r_tickptr = tcg_temp_new_ptr();
4291                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4292                                                    offsetof(CPUSPARCState, stick));
4293                                     translator_io_start(&dc->base);
4294                                     gen_helper_tick_set_limit(r_tickptr,
4295                                                               cpu_stick_cmpr);
4296                                     /* End TB to handle timer interrupt */
4297                                     dc->base.is_jmp = DISAS_EXIT;
4298                                 }
4299                                 break;
4300 
4301                             case 0x10: /* Performance Control */
4302                             case 0x11: /* Performance Instrumentation
4303                                           Counter */
4304                             case 0x12: /* Dispatch Control */
4305 #endif
4306                             default:
4307                                 goto illegal_insn;
4308                             }
4309                         }
4310                         break;
#if !defined(CONFIG_USER_ONLY)
                    case 0x31: /* wrpsr, V9 saved, restored */
                        {
                            /* Privileged on all variants.  */
                            if (!supervisor(dc))
                                goto priv_insn;
#ifdef TARGET_SPARC64
                            /* V9 reuses this opcode: rd selects the
                               window-management operation.  */
                            switch (rd) {
                            case 0:
                                gen_helper_saved(tcg_env);
                                break;
                            case 1:
                                gen_helper_restored(tcg_env);
                                break;
                            case 2: /* UA2005 allclean */
                            case 3: /* UA2005 otherw */
                            case 4: /* UA2005 normalw */
                            case 5: /* UA2005 invalw */
                                // XXX: UA2005 forms not implemented yet
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrpsr: write rs1 ^ rs2 into the PSR.  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            gen_helper_wrpsr(tcg_env, cpu_tmp0);
                            /* The PSR write replaces the condition codes, so
                               the cc state now lives in env (CC_OP_FLAGS).  */
                            tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
                            dc->cc_op = CC_OP_FLAGS;
                            /* PSR affects CWP/interrupt state: stop this TB
                               and re-enter the translator at the next insn.  */
                            save_state(dc);
                            gen_op_next_insn();
                            tcg_gen_exit_tb(NULL, 0);
                            dc->base.is_jmp = DISAS_NORETURN;
#endif
                        }
                        break;
                    case 0x32: /* wrwim, V9 wrpr */
                        {
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* Value written is rs1 ^ rs2 (WR encoding).  */
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
#ifdef TARGET_SPARC64
                            /* V9 wrpr: rd selects the privileged register.
                               The tpc/tnpc/tstate/tt fields live in the
                               per-TL trap_state located via
                               gen_load_trap_state_at_tl().  */
                            switch (rd) {
                            case 0: // tpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    /* tt is a 32-bit field.  */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    /* Timer write is an I/O operation.  */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                /* The helper may change global translation
                                   state, so the saved PC must be current and
                                   npc becomes dynamic.  */
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                /* Changing TL switches trap state; force a
                                   dynamic npc so translation restarts.  */
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                /* PIL affects interrupt delivery.  */
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                /* Hypervisor-only register.  */
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* V8 wrwim: mask the value to the number of
                               implemented register windows.  */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            /* V8 wrtbr: privileged write of rs1 ^ rs2 to the
                               trap base register.  */
                            if (!supervisor(dc))
                                goto priv_insn;
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            /* UA2005 wrhpr: hypervisor-privileged register
                               file, selected by rd.  */
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* hpstate changes execution mode: end this
                                   TB and restart at the next insn.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    /* Timer write is an I/O operation.  */
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 selects integer cc (icc/xcc) vs
                               floating-point cc (fcc0-3).  */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* Conditional move: keep old rd value when the
                               condition is false.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* Helper handles divide-by-zero and overflow traps. */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        /* Population count of rs2 only; rs1 is ignored.  */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* movr tests rs1 against zero.  */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                /* All VIS ops use the FPU register file; trap if the FPU
                   is disabled.  */
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                switch (opf) {
                /* edge instructions: gen_edge(dc, dst, s1, s2, width,
                   cc-update flag, little-endian flag) -- see gen_edge.  */
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    /* array16/array32 are array8 scaled by element size
                       (<< 1 and << 2 respectively).  */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    /* rd = rs1 + rs2; the sum is also deposited into the
                       upper 32 bits of GSR (the bmask field).  */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* Partitioned FP compares: 64-bit FP sources, integer
                   result register.  */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* Partitioned multiplies via DDD helpers (double src,
                   double src, double dst).  */
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    /* gsr_fop variants additionally read GSR (scale/align
                       fields).  */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    /* DDDD: pdist accumulates into the old rd value.  */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                /* Partitioned add/sub; the 32-bit "s" forms map directly
                   onto TCG add/sub where possible.  */
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                /* Logical ops implemented directly with TCG bit ops.  */
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    /* fandnot1 = rs2 & ~rs1: reuse andc with sources
                       swapped.  */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
4942                     break;
4943                 case 0x06a: /* VIS I fnot1 */
4944                     CHECK_FPU_FEATURE(dc, VIS1);
4945                     gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
4946                     break;
4947                 case 0x06b: /* VIS I fnot1s */
4948                     CHECK_FPU_FEATURE(dc, VIS1);
4949                     gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
4950                     break;
4951                 case 0x06c: /* VIS I fxor */
4952                     CHECK_FPU_FEATURE(dc, VIS1);
4953                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
4954                     break;
4955                 case 0x06d: /* VIS I fxors */
4956                     CHECK_FPU_FEATURE(dc, VIS1);
4957                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
4958                     break;
4959                 case 0x06e: /* VIS I fnand */
4960                     CHECK_FPU_FEATURE(dc, VIS1);
4961                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
4962                     break;
4963                 case 0x06f: /* VIS I fnands */
4964                     CHECK_FPU_FEATURE(dc, VIS1);
4965                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
4966                     break;
4967                 case 0x070: /* VIS I fand */
4968                     CHECK_FPU_FEATURE(dc, VIS1);
4969                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
4970                     break;
4971                 case 0x071: /* VIS I fands */
4972                     CHECK_FPU_FEATURE(dc, VIS1);
4973                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
4974                     break;
4975                 case 0x072: /* VIS I fxnor */
4976                     CHECK_FPU_FEATURE(dc, VIS1);
4977                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
4978                     break;
4979                 case 0x073: /* VIS I fxnors */
4980                     CHECK_FPU_FEATURE(dc, VIS1);
4981                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
4982                     break;
4983                 case 0x074: /* VIS I fsrc1 */
4984                     CHECK_FPU_FEATURE(dc, VIS1);
4985                     cpu_src1_64 = gen_load_fpr_D(dc, rs1);
4986                     gen_store_fpr_D(dc, rd, cpu_src1_64);
4987                     break;
4988                 case 0x075: /* VIS I fsrc1s */
4989                     CHECK_FPU_FEATURE(dc, VIS1);
4990                     cpu_src1_32 = gen_load_fpr_F(dc, rs1);
4991                     gen_store_fpr_F(dc, rd, cpu_src1_32);
4992                     break;
4993                 case 0x076: /* VIS I fornot2 */
4994                     CHECK_FPU_FEATURE(dc, VIS1);
4995                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
4996                     break;
4997                 case 0x077: /* VIS I fornot2s */
4998                     CHECK_FPU_FEATURE(dc, VIS1);
4999                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
5000                     break;
5001                 case 0x078: /* VIS I fsrc2 */
5002                     CHECK_FPU_FEATURE(dc, VIS1);
5003                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
5004                     gen_store_fpr_D(dc, rd, cpu_src1_64);
5005                     break;
5006                 case 0x079: /* VIS I fsrc2s */
5007                     CHECK_FPU_FEATURE(dc, VIS1);
5008                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
5009                     gen_store_fpr_F(dc, rd, cpu_src1_32);
5010                     break;
5011                 case 0x07a: /* VIS I fornot1 */
5012                     CHECK_FPU_FEATURE(dc, VIS1);
5013                     gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
5014                     break;
5015                 case 0x07b: /* VIS I fornot1s */
5016                     CHECK_FPU_FEATURE(dc, VIS1);
5017                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
5018                     break;
5019                 case 0x07c: /* VIS I for */
5020                     CHECK_FPU_FEATURE(dc, VIS1);
5021                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
5022                     break;
5023                 case 0x07d: /* VIS I fors */
5024                     CHECK_FPU_FEATURE(dc, VIS1);
5025                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
5026                     break;
5027                 case 0x07e: /* VIS I fone */
5028                     CHECK_FPU_FEATURE(dc, VIS1);
5029                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5030                     tcg_gen_movi_i64(cpu_dst_64, -1);
5031                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5032                     break;
5033                 case 0x07f: /* VIS I fones */
5034                     CHECK_FPU_FEATURE(dc, VIS1);
5035                     cpu_dst_32 = gen_dest_fpr_F(dc);
5036                     tcg_gen_movi_i32(cpu_dst_32, -1);
5037                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5038                     break;
5039                 case 0x080: /* VIS I shutdown */
5040                 case 0x081: /* VIS II siam */
5041                     // XXX
5042                     goto illegal_insn;
5043                 default:
5044                     goto illegal_insn;
5045                 }
5046 #else
5047                 goto ncp_insn;
5048 #endif
5049             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
5050 #ifdef TARGET_SPARC64
5051                 goto illegal_insn;
5052 #else
5053                 goto ncp_insn;
5054 #endif
5055 #ifdef TARGET_SPARC64
5056             } else if (xop == 0x39) { /* V9 return */
5057                 save_state(dc);
5058                 cpu_src1 = get_src1(dc, insn);
5059                 cpu_tmp0 = tcg_temp_new();
5060                 if (IS_IMM) {   /* immediate */
5061                     simm = GET_FIELDs(insn, 19, 31);
5062                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5063                 } else {                /* register */
5064                     rs2 = GET_FIELD(insn, 27, 31);
5065                     if (rs2) {
5066                         cpu_src2 = gen_load_gpr(dc, rs2);
5067                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5068                     } else {
5069                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5070                     }
5071                 }
5072                 gen_check_align(dc, cpu_tmp0, 3);
5073                 gen_helper_restore(tcg_env);
5074                 gen_mov_pc_npc(dc);
5075                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5076                 dc->npc = DYNAMIC_PC_LOOKUP;
5077                 goto jmp_insn;
5078 #endif
5079             } else {
5080                 cpu_src1 = get_src1(dc, insn);
5081                 cpu_tmp0 = tcg_temp_new();
5082                 if (IS_IMM) {   /* immediate */
5083                     simm = GET_FIELDs(insn, 19, 31);
5084                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5085                 } else {                /* register */
5086                     rs2 = GET_FIELD(insn, 27, 31);
5087                     if (rs2) {
5088                         cpu_src2 = gen_load_gpr(dc, rs2);
5089                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5090                     } else {
5091                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5092                     }
5093                 }
5094                 switch (xop) {
5095                 case 0x38:      /* jmpl */
5096                     {
5097                         gen_check_align(dc, cpu_tmp0, 3);
5098                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5099                         gen_mov_pc_npc(dc);
5100                         gen_address_mask(dc, cpu_tmp0);
5101                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5102                         dc->npc = DYNAMIC_PC_LOOKUP;
5103                     }
5104                     goto jmp_insn;
5105 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5106                 case 0x39:      /* rett, V9 return */
5107                     {
5108                         if (!supervisor(dc))
5109                             goto priv_insn;
5110                         gen_check_align(dc, cpu_tmp0, 3);
5111                         gen_mov_pc_npc(dc);
5112                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5113                         dc->npc = DYNAMIC_PC;
5114                         gen_helper_rett(tcg_env);
5115                     }
5116                     goto jmp_insn;
5117 #endif
5118                 case 0x3b: /* flush */
5119                     /* nop */
5120                     break;
5121                 case 0x3c:      /* save */
5122                     gen_helper_save(tcg_env);
5123                     gen_store_gpr(dc, rd, cpu_tmp0);
5124                     break;
5125                 case 0x3d:      /* restore */
5126                     gen_helper_restore(tcg_env);
5127                     gen_store_gpr(dc, rd, cpu_tmp0);
5128                     break;
5129 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5130                 case 0x3e:      /* V9 done/retry */
5131                     {
5132                         switch (rd) {
5133                         case 0:
5134                             if (!supervisor(dc))
5135                                 goto priv_insn;
5136                             dc->npc = DYNAMIC_PC;
5137                             dc->pc = DYNAMIC_PC;
5138                             translator_io_start(&dc->base);
5139                             gen_helper_done(tcg_env);
5140                             goto jmp_insn;
5141                         case 1:
5142                             if (!supervisor(dc))
5143                                 goto priv_insn;
5144                             dc->npc = DYNAMIC_PC;
5145                             dc->pc = DYNAMIC_PC;
5146                             translator_io_start(&dc->base);
5147                             gen_helper_retry(tcg_env);
5148                             goto jmp_insn;
5149                         default:
5150                             goto illegal_insn;
5151                         }
5152                     }
5153                     break;
5154 #endif
5155                 default:
5156                     goto illegal_insn;
5157                 }
5158             }
5159             break;
5160         }
5161         break;
5162     case 3:                     /* load/store instructions */
5163         {
5164             unsigned int xop = GET_FIELD(insn, 7, 12);
5165             /* ??? gen_address_mask prevents us from using a source
5166                register directly.  Always generate a temporary.  */
5167             TCGv cpu_addr = tcg_temp_new();
5168 
5169             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5170             if (xop == 0x3c || xop == 0x3e) {
5171                 /* V9 casa/casxa : no offset */
5172             } else if (IS_IMM) {     /* immediate */
5173                 simm = GET_FIELDs(insn, 19, 31);
5174                 if (simm != 0) {
5175                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5176                 }
5177             } else {            /* register */
5178                 rs2 = GET_FIELD(insn, 27, 31);
5179                 if (rs2 != 0) {
5180                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5181                 }
5182             }
5183             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5184                 (xop > 0x17 && xop <= 0x1d ) ||
5185                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5186                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5187 
5188                 switch (xop) {
5189                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5190                     gen_address_mask(dc, cpu_addr);
5191                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5192                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5193                     break;
5194                 case 0x1:       /* ldub, load unsigned byte */
5195                     gen_address_mask(dc, cpu_addr);
5196                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5197                                        dc->mem_idx, MO_UB);
5198                     break;
5199                 case 0x2:       /* lduh, load unsigned halfword */
5200                     gen_address_mask(dc, cpu_addr);
5201                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5202                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5203                     break;
5204                 case 0x3:       /* ldd, load double word */
5205                     if (rd & 1)
5206                         goto illegal_insn;
5207                     else {
5208                         TCGv_i64 t64;
5209 
5210                         gen_address_mask(dc, cpu_addr);
5211                         t64 = tcg_temp_new_i64();
5212                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5213                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5214                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5215                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5216                         gen_store_gpr(dc, rd + 1, cpu_val);
5217                         tcg_gen_shri_i64(t64, t64, 32);
5218                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5219                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5220                     }
5221                     break;
5222                 case 0x9:       /* ldsb, load signed byte */
5223                     gen_address_mask(dc, cpu_addr);
5224                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5225                     break;
5226                 case 0xa:       /* ldsh, load signed halfword */
5227                     gen_address_mask(dc, cpu_addr);
5228                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5229                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5230                     break;
5231                 case 0xd:       /* ldstub */
5232                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5233                     break;
5234                 case 0x0f:
5235                     /* swap, swap register with memory. Also atomically */
5236                     cpu_src1 = gen_load_gpr(dc, rd);
5237                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5238                              dc->mem_idx, MO_TEUL);
5239                     break;
5240 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5241                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5242                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5243                     break;
5244                 case 0x11:      /* lduba, load unsigned byte alternate */
5245                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5246                     break;
5247                 case 0x12:      /* lduha, load unsigned halfword alternate */
5248                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5249                     break;
5250                 case 0x13:      /* ldda, load double word alternate */
5251                     if (rd & 1) {
5252                         goto illegal_insn;
5253                     }
5254                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5255                     goto skip_move;
5256                 case 0x19:      /* ldsba, load signed byte alternate */
5257                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5258                     break;
5259                 case 0x1a:      /* ldsha, load signed halfword alternate */
5260                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5261                     break;
5262                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5263                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5264                     break;
5265                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5266                                    atomically */
5267                     cpu_src1 = gen_load_gpr(dc, rd);
5268                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5269                     break;
5270 
5271 #ifndef TARGET_SPARC64
5272                 case 0x30: /* ldc */
5273                 case 0x31: /* ldcsr */
5274                 case 0x33: /* lddc */
5275                     goto ncp_insn;
5276 #endif
5277 #endif
5278 #ifdef TARGET_SPARC64
5279                 case 0x08: /* V9 ldsw */
5280                     gen_address_mask(dc, cpu_addr);
5281                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5282                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5283                     break;
5284                 case 0x0b: /* V9 ldx */
5285                     gen_address_mask(dc, cpu_addr);
5286                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5287                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5288                     break;
5289                 case 0x18: /* V9 ldswa */
5290                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5291                     break;
5292                 case 0x1b: /* V9 ldxa */
5293                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5294                     break;
5295                 case 0x2d: /* V9 prefetch, no effect */
5296                     goto skip_move;
5297                 case 0x30: /* V9 ldfa */
5298                     if (gen_trap_ifnofpu(dc)) {
5299                         goto jmp_insn;
5300                     }
5301                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5302                     gen_update_fprs_dirty(dc, rd);
5303                     goto skip_move;
5304                 case 0x33: /* V9 lddfa */
5305                     if (gen_trap_ifnofpu(dc)) {
5306                         goto jmp_insn;
5307                     }
5308                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5309                     gen_update_fprs_dirty(dc, DFPREG(rd));
5310                     goto skip_move;
5311                 case 0x3d: /* V9 prefetcha, no effect */
5312                     goto skip_move;
5313                 case 0x32: /* V9 ldqfa */
5314                     CHECK_FPU_FEATURE(dc, FLOAT128);
5315                     if (gen_trap_ifnofpu(dc)) {
5316                         goto jmp_insn;
5317                     }
5318                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5319                     gen_update_fprs_dirty(dc, QFPREG(rd));
5320                     goto skip_move;
5321 #endif
5322                 default:
5323                     goto illegal_insn;
5324                 }
5325                 gen_store_gpr(dc, rd, cpu_val);
5326 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5327             skip_move: ;
5328 #endif
5329             } else if (xop >= 0x20 && xop < 0x24) {
5330                 if (gen_trap_ifnofpu(dc)) {
5331                     goto jmp_insn;
5332                 }
5333                 switch (xop) {
5334                 case 0x20:      /* ldf, load fpreg */
5335                     gen_address_mask(dc, cpu_addr);
5336                     cpu_dst_32 = gen_dest_fpr_F(dc);
5337                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5338                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5339                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5340                     break;
5341                 case 0x21:      /* ldfsr, V9 ldxfsr */
5342 #ifdef TARGET_SPARC64
5343                     gen_address_mask(dc, cpu_addr);
5344                     if (rd == 1) {
5345                         TCGv_i64 t64 = tcg_temp_new_i64();
5346                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5347                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5348                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5349                         break;
5350                     }
5351 #endif
5352                     cpu_dst_32 = tcg_temp_new_i32();
5353                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5354                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5355                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5356                     break;
5357                 case 0x22:      /* ldqf, load quad fpreg */
5358                     CHECK_FPU_FEATURE(dc, FLOAT128);
5359                     gen_address_mask(dc, cpu_addr);
5360                     cpu_src1_64 = tcg_temp_new_i64();
5361                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5362                                         MO_TEUQ | MO_ALIGN_4);
5363                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5364                     cpu_src2_64 = tcg_temp_new_i64();
5365                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5366                                         MO_TEUQ | MO_ALIGN_4);
5367                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5368                     break;
5369                 case 0x23:      /* lddf, load double fpreg */
5370                     gen_address_mask(dc, cpu_addr);
5371                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5372                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5373                                         MO_TEUQ | MO_ALIGN_4);
5374                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5375                     break;
5376                 default:
5377                     goto illegal_insn;
5378                 }
5379             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5380                        xop == 0xe || xop == 0x1e) {
5381                 TCGv cpu_val = gen_load_gpr(dc, rd);
5382 
5383                 switch (xop) {
5384                 case 0x4: /* st, store word */
5385                     gen_address_mask(dc, cpu_addr);
5386                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5387                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5388                     break;
5389                 case 0x5: /* stb, store byte */
5390                     gen_address_mask(dc, cpu_addr);
5391                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5392                     break;
5393                 case 0x6: /* sth, store halfword */
5394                     gen_address_mask(dc, cpu_addr);
5395                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5396                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5397                     break;
5398                 case 0x7: /* std, store double word */
5399                     if (rd & 1)
5400                         goto illegal_insn;
5401                     else {
5402                         TCGv_i64 t64;
5403                         TCGv lo;
5404 
5405                         gen_address_mask(dc, cpu_addr);
5406                         lo = gen_load_gpr(dc, rd + 1);
5407                         t64 = tcg_temp_new_i64();
5408                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5409                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5410                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5411                     }
5412                     break;
5413 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5414                 case 0x14: /* sta, V9 stwa, store word alternate */
5415                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5416                     break;
5417                 case 0x15: /* stba, store byte alternate */
5418                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5419                     break;
5420                 case 0x16: /* stha, store halfword alternate */
5421                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5422                     break;
5423                 case 0x17: /* stda, store double word alternate */
5424                     if (rd & 1) {
5425                         goto illegal_insn;
5426                     }
5427                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5428                     break;
5429 #endif
5430 #ifdef TARGET_SPARC64
5431                 case 0x0e: /* V9 stx */
5432                     gen_address_mask(dc, cpu_addr);
5433                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5434                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5435                     break;
5436                 case 0x1e: /* V9 stxa */
5437                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5438                     break;
5439 #endif
5440                 default:
5441                     goto illegal_insn;
5442                 }
5443             } else if (xop > 0x23 && xop < 0x28) {
5444                 if (gen_trap_ifnofpu(dc)) {
5445                     goto jmp_insn;
5446                 }
5447                 switch (xop) {
5448                 case 0x24: /* stf, store fpreg */
5449                     gen_address_mask(dc, cpu_addr);
5450                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5451                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5452                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5453                     break;
5454                 case 0x25: /* stfsr, V9 stxfsr */
5455                     {
5456 #ifdef TARGET_SPARC64
5457                         gen_address_mask(dc, cpu_addr);
5458                         if (rd == 1) {
5459                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5460                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5461                             break;
5462                         }
5463 #endif
5464                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5465                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5466                     }
5467                     break;
5468                 case 0x26:
5469 #ifdef TARGET_SPARC64
5470                     /* V9 stqf, store quad fpreg */
5471                     CHECK_FPU_FEATURE(dc, FLOAT128);
5472                     gen_address_mask(dc, cpu_addr);
5473                     /* ??? While stqf only requires 4-byte alignment, it is
5474                        legal for the cpu to signal the unaligned exception.
5475                        The OS trap handler is then required to fix it up.
5476                        For qemu, this avoids having to probe the second page
5477                        before performing the first write.  */
5478                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5479                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5480                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5481                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5482                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5483                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5484                                         dc->mem_idx, MO_TEUQ);
5485                     break;
5486 #else /* !TARGET_SPARC64 */
5487                     /* stdfq, store floating point queue */
5488 #if defined(CONFIG_USER_ONLY)
5489                     goto illegal_insn;
5490 #else
5491                     if (!supervisor(dc))
5492                         goto priv_insn;
5493                     if (gen_trap_ifnofpu(dc)) {
5494                         goto jmp_insn;
5495                     }
5496                     goto nfq_insn;
5497 #endif
5498 #endif
5499                 case 0x27: /* stdf, store double fpreg */
5500                     gen_address_mask(dc, cpu_addr);
5501                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5502                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5503                                         MO_TEUQ | MO_ALIGN_4);
5504                     break;
5505                 default:
5506                     goto illegal_insn;
5507                 }
5508             } else if (xop > 0x33 && xop < 0x3f) {
5509                 switch (xop) {
5510 #ifdef TARGET_SPARC64
5511                 case 0x34: /* V9 stfa */
5512                     if (gen_trap_ifnofpu(dc)) {
5513                         goto jmp_insn;
5514                     }
5515                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5516                     break;
5517                 case 0x36: /* V9 stqfa */
5518                     {
5519                         CHECK_FPU_FEATURE(dc, FLOAT128);
5520                         if (gen_trap_ifnofpu(dc)) {
5521                             goto jmp_insn;
5522                         }
5523                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5524                     }
5525                     break;
5526                 case 0x37: /* V9 stdfa */
5527                     if (gen_trap_ifnofpu(dc)) {
5528                         goto jmp_insn;
5529                     }
5530                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5531                     break;
5532                 case 0x3e: /* V9 casxa */
5533                     rs2 = GET_FIELD(insn, 27, 31);
5534                     cpu_src2 = gen_load_gpr(dc, rs2);
5535                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5536                     break;
5537 #else
5538                 case 0x34: /* stc */
5539                 case 0x35: /* stcsr */
5540                 case 0x36: /* stdcq */
5541                 case 0x37: /* stdc */
5542                     goto ncp_insn;
5543 #endif
5544 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5545                 case 0x3c: /* V9 or LEON3 casa */
5546 #ifndef TARGET_SPARC64
5547                     CHECK_IU_FEATURE(dc, CASA);
5548 #endif
5549                     rs2 = GET_FIELD(insn, 27, 31);
5550                     cpu_src2 = gen_load_gpr(dc, rs2);
5551                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5552                     break;
5553 #endif
5554                 default:
5555                     goto illegal_insn;
5556                 }
5557             } else {
5558                 goto illegal_insn;
5559             }
5560         }
5561         break;
5562     }
5563     advance_pc(dc);
5564  jmp_insn:
5565     return;
5566  illegal_insn:
5567     gen_exception(dc, TT_ILL_INSN);
5568     return;
5569 #if !defined(CONFIG_USER_ONLY)
5570  priv_insn:
5571     gen_exception(dc, TT_PRIV_INSN);
5572     return;
5573 #endif
5574  nfpu_insn:
5575     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5576     return;
5577 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5578  nfq_insn:
5579     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5580     return;
5581 #endif
5582 #ifndef TARGET_SPARC64
5583  ncp_insn:
5584     gen_exception(dc, TT_NCP_INSN);
5585     return;
5586 #endif
5587 }
5588 
5589 static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
5590 {
5591     DisasContext *dc = container_of(dcbase, DisasContext, base);
5592     CPUSPARCState *env = cpu_env(cs);
5593     int bound;
5594 
5595     dc->pc = dc->base.pc_first;
5596     dc->npc = (target_ulong)dc->base.tb->cs_base;
5597     dc->cc_op = CC_OP_DYNAMIC;
5598     dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
5599     dc->def = &env->def;
5600     dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
5601     dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
5602 #ifndef CONFIG_USER_ONLY
5603     dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
5604 #endif
5605 #ifdef TARGET_SPARC64
5606     dc->fprs_dirty = 0;
5607     dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
5608 #ifndef CONFIG_USER_ONLY
5609     dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
5610 #endif
5611 #endif
5612     /*
5613      * if we reach a page boundary, we stop generation so that the
5614      * PC of a TT_TFAULT exception is always in the right page
5615      */
5616     bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
5617     dc->base.max_insns = MIN(dc->base.max_insns, bound);
5618 }
5619 
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* No per-TB setup needed beyond sparc_tr_init_disas_context.  */
}
5623 
5624 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5625 {
5626     DisasContext *dc = container_of(dcbase, DisasContext, base);
5627     target_ulong npc = dc->npc;
5628 
5629     if (npc & 3) {
5630         switch (npc) {
5631         case JUMP_PC:
5632             assert(dc->jump_pc[1] == dc->pc + 4);
5633             npc = dc->jump_pc[0] | JUMP_PC;
5634             break;
5635         case DYNAMIC_PC:
5636         case DYNAMIC_PC_LOOKUP:
5637             npc = DYNAMIC_PC;
5638             break;
5639         default:
5640             g_assert_not_reached();
5641         }
5642     }
5643     tcg_gen_insn_start(dc->pc, npc);
5644 }
5645 
5646 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5647 {
5648     DisasContext *dc = container_of(dcbase, DisasContext, base);
5649     CPUSPARCState *env = cpu_env(cs);
5650     unsigned int insn;
5651 
5652     insn = translator_ldl(env, &dc->base, dc->pc);
5653     dc->base.pc_next += 4;
5654 
5655     if (!decode(dc, insn)) {
5656         disas_sparc_legacy(dc, insn);
5657     }
5658 
5659     if (dc->base.is_jmp == DISAS_NORETURN) {
5660         return;
5661     }
5662     if (dc->pc != dc->base.pc_next) {
5663         dc->base.is_jmp = DISAS_TOO_MANY;
5664     }
5665 }
5666 
/*
 * Emit the code that ends a translation block: set up cpu_pc/cpu_npc
 * as needed and leave via goto_tb, a TB-cache lookup, or a full exit
 * to the main loop; then emit the bodies of any delayed exceptions
 * queued during translation.
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of pc/npc is dynamic (low bits hold one of the
         * DYNAMIC_PC/JUMP_PC markers).  Materialize any static values
         * into cpu_pc/cpu_npc, then exit either via a TB-cache lookup
         * or, when DYNAMIC_PC forbids it, via the main loop.
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                /* cpu_pc already holds the value; lookup permitted.  */
                break;
            case DYNAMIC_PC:
                /* cpu_pc already holds the value; must exit to loop.  */
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* Resolve the two-way branch into cpu_pc/cpu_npc now.  */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /*
     * Emit the out-of-line exception raises queued by delay_exception().
     * Each entry sets the recorded pc (and npc, when it is a real
     * aligned address rather than a dynamic marker) and raises.
     */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5746 
/* Log hook: print the symbol at the TB start, then disassemble the TB. */
static void sparc_tr_disas_log(const DisasContextBase *dcbase,
                               CPUState *cpu, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
}
5753 
/* Hook table wiring the SPARC frontend into the generic translator loop. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5762 
5763 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5764                            target_ulong pc, void *host_pc)
5765 {
5766     DisasContext dc = {};
5767 
5768     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5769 }
5770 
5771 void sparc_tcg_init(void)
5772 {
5773     static const char gregnames[32][4] = {
5774         "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
5775         "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
5776         "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
5777         "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
5778     };
5779     static const char fregnames[32][4] = {
5780         "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
5781         "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
5782         "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
5783         "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
5784     };
5785 
5786     static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
5787 #ifdef TARGET_SPARC64
5788         { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
5789         { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
5790 #else
5791         { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
5792 #endif
5793         { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
5794         { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
5795     };
5796 
5797     static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
5798 #ifdef TARGET_SPARC64
5799         { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
5800         { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
5801         { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
5802         { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
5803           "hstick_cmpr" },
5804         { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
5805         { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
5806         { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
5807         { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
5808         { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
5809 #endif
5810         { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
5811         { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
5812         { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
5813         { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
5814         { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
5815         { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
5816         { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
5817         { &cpu_y, offsetof(CPUSPARCState, y), "y" },
5818 #ifndef CONFIG_USER_ONLY
5819         { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
5820 #endif
5821     };
5822 
5823     unsigned int i;
5824 
5825     cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
5826                                          offsetof(CPUSPARCState, regwptr),
5827                                          "regwptr");
5828 
5829     for (i = 0; i < ARRAY_SIZE(r32); ++i) {
5830         *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
5831     }
5832 
5833     for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
5834         *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
5835     }
5836 
5837     cpu_regs[0] = NULL;
5838     for (i = 1; i < 8; ++i) {
5839         cpu_regs[i] = tcg_global_mem_new(tcg_env,
5840                                          offsetof(CPUSPARCState, gregs[i]),
5841                                          gregnames[i]);
5842     }
5843 
5844     for (i = 8; i < 32; ++i) {
5845         cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
5846                                          (i - 8) * sizeof(target_ulong),
5847                                          gregnames[i]);
5848     }
5849 
5850     for (i = 0; i < TARGET_DPREGS; i++) {
5851         cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
5852                                             offsetof(CPUSPARCState, fpr[i]),
5853                                             fregnames[i]);
5854     }
5855 }
5856 
5857 void sparc_restore_state_to_opc(CPUState *cs,
5858                                 const TranslationBlock *tb,
5859                                 const uint64_t *data)
5860 {
5861     SPARCCPU *cpu = SPARC_CPU(cs);
5862     CPUSPARCState *env = &cpu->env;
5863     target_ulong pc = data[0];
5864     target_ulong npc = data[1];
5865 
5866     env->pc = pc;
5867     if (npc == DYNAMIC_PC) {
5868         /* dynamic NPC: already stored */
5869     } else if (npc & JUMP_PC) {
5870         /* jump PC: use 'cond' and the jump targets of the translation */
5871         if (env->cond) {
5872             env->npc = npc & ~3;
5873         } else {
5874             env->npc = pc + 4;
5875         }
5876     } else {
5877         env->npc = npc;
5878     }
5879 }
5880