xref: /openbmc/qemu/target/sparc/translate.c (revision 3037663616db929a3f9c21daa64ff9605bb6ac6f)
1 /*
2    SPARC translation
3 
4    Copyright (C) 2003 Thomas M. Ogrisegg <tom@fnord.at>
5    Copyright (C) 2003-2005 Fabrice Bellard
6 
7    This library is free software; you can redistribute it and/or
8    modify it under the terms of the GNU Lesser General Public
9    License as published by the Free Software Foundation; either
10    version 2.1 of the License, or (at your option) any later version.
11 
12    This library is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15    Lesser General Public License for more details.
16 
17    You should have received a copy of the GNU Lesser General Public
18    License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 
23 #include "cpu.h"
24 #include "disas/disas.h"
25 #include "exec/helper-proto.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 
29 #include "exec/helper-gen.h"
30 
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 #include "asi.h"
34 
35 #define HELPER_H "helper.h"
36 #include "exec/helper-info.c.inc"
37 #undef  HELPER_H
38 
39 /* Dynamic PC, must exit to main loop. */
40 #define DYNAMIC_PC         1
41 /* Dynamic PC, one of two values according to jump_pc[T2]. */
42 #define JUMP_PC            2
43 /* Dynamic PC, may lookup next TB. */
44 #define DYNAMIC_PC_LOOKUP  3
45 
46 #define DISAS_EXIT  DISAS_TARGET_0
47 
48 /* global register indexes */
49 static TCGv_ptr cpu_regwptr;
50 static TCGv cpu_cc_src, cpu_cc_src2, cpu_cc_dst;
51 static TCGv_i32 cpu_cc_op;
52 static TCGv_i32 cpu_psr;
53 static TCGv cpu_fsr, cpu_pc, cpu_npc;
54 static TCGv cpu_regs[32];
55 static TCGv cpu_y;
56 #ifndef CONFIG_USER_ONLY
57 static TCGv cpu_tbr;
58 #endif
59 static TCGv cpu_cond;
60 #ifdef TARGET_SPARC64
61 static TCGv_i32 cpu_xcc, cpu_fprs;
62 static TCGv cpu_gsr;
63 static TCGv cpu_tick_cmpr, cpu_stick_cmpr, cpu_hstick_cmpr;
64 static TCGv cpu_hintp, cpu_htba, cpu_hver, cpu_ssr, cpu_ver;
65 #else
66 static TCGv cpu_wim;
67 #endif
68 /* Floating point registers */
69 static TCGv_i64 cpu_fpr[TARGET_DPREGS];
70 
71 typedef struct DisasDelayException {
72     struct DisasDelayException *next;
73     TCGLabel *lab;
74     TCGv_i32 excp;
75     /* Saved state at parent insn. */
76     target_ulong pc;
77     target_ulong npc;
78 } DisasDelayException;
79 
80 typedef struct DisasContext {
81     DisasContextBase base;
82     target_ulong pc;    /* current Program Counter: integer or DYNAMIC_PC */
83     target_ulong npc;   /* next PC: integer or DYNAMIC_PC or JUMP_PC */
84     target_ulong jump_pc[2]; /* used when JUMP_PC pc value is used */
85     int mem_idx;
86     bool fpu_enabled;
87     bool address_mask_32bit;
88 #ifndef CONFIG_USER_ONLY
89     bool supervisor;
90 #ifdef TARGET_SPARC64
91     bool hypervisor;
92 #endif
93 #endif
94 
95     uint32_t cc_op;  /* current CC operation */
96     sparc_def_t *def;
97 #ifdef TARGET_SPARC64
98     int fprs_dirty;
99     int asi;
100 #endif
101     DisasDelayException *delay_excp_list;
102 } DisasContext;
103 
104 typedef struct {
105     TCGCond cond;
106     bool is_bool;
107     TCGv c1, c2;
108 } DisasCompare;
109 
110 // This function uses non-native bit order
111 #define GET_FIELD(X, FROM, TO)                                  \
112     ((X) >> (31 - (TO)) & ((1 << ((TO) - (FROM) + 1)) - 1))
113 
114 // This function uses the order in the manuals, i.e. bit 0 is 2^0
115 #define GET_FIELD_SP(X, FROM, TO)               \
116     GET_FIELD(X, 31 - (TO), 31 - (FROM))
117 
118 #define GET_FIELDs(x,a,b) sign_extend (GET_FIELD(x,a,b), (b) - (a) + 1)
119 #define GET_FIELD_SPs(x,a,b) sign_extend (GET_FIELD_SP(x,a,b), ((b) - (a) + 1))
120 
121 #ifdef TARGET_SPARC64
122 #define DFPREG(r) (((r & 1) << 5) | (r & 0x1e))
123 #define QFPREG(r) (((r & 1) << 5) | (r & 0x1c))
124 #else
125 #define DFPREG(r) (r & 0x1e)
126 #define QFPREG(r) (r & 0x1c)
127 #endif
128 
129 #define UA2005_HTRAP_MASK 0xff
130 #define V8_TRAP_MASK 0x7f
131 
/*
 * Sign-extend the low LEN bits of X to a full signed int.
 * Implemented with an arithmetic shift pair; LEN must be in [1, 32].
 */
static int sign_extend(int x, int len)
{
    int shift = 32 - len;
    return (x << shift) >> shift;
}
137 
138 #define IS_IMM (insn & (1<<13))
139 
/*
 * After a write to FP register RD, set the matching dirty bit in
 * cpu_fprs (rd < 32 -> bit 1, upper bank -> bit 2).  SPARC64 only;
 * a no-op on 32-bit targets.  dc->fprs_dirty caches bits already set
 * within this TB so the OR is emitted at most once per bit.
 */
static void gen_update_fprs_dirty(DisasContext *dc, int rd)
{
#if defined(TARGET_SPARC64)
    int bit = (rd < 32) ? 1 : 2;
    /* If we know we've already set this bit within the TB,
       we can avoid setting it again.  */
    if (!(dc->fprs_dirty & bit)) {
        dc->fprs_dirty |= bit;
        tcg_gen_ori_i32(cpu_fprs, cpu_fprs, bit);
    }
#endif
}
152 
153 /* floating point registers moves */
/*
 * Read single-precision FP register SRC into a fresh i32 temp.
 * Singles are stored pairwise in the 64-bit cpu_fpr[] array: an
 * odd-numbered register occupies the low half, an even-numbered one
 * the high half.
 */
static TCGv_i32 gen_load_fpr_F(DisasContext *dc, unsigned int src)
{
    TCGv_i32 ret = tcg_temp_new_i32();
    if (src & 1) {
        tcg_gen_extrl_i64_i32(ret, cpu_fpr[src / 2]);
    } else {
        tcg_gen_extrh_i64_i32(ret, cpu_fpr[src / 2]);
    }
    return ret;
}
164 
/*
 * Write i32 value V to single-precision FP register DST by depositing
 * it into the matching half of the 64-bit backing register (odd regs
 * into bits [31:0], even regs into bits [63:32]); marks FPRS dirty.
 */
static void gen_store_fpr_F(DisasContext *dc, unsigned int dst, TCGv_i32 v)
{
    TCGv_i64 t = tcg_temp_new_i64();

    tcg_gen_extu_i32_i64(t, v);
    tcg_gen_deposit_i64(cpu_fpr[dst / 2], cpu_fpr[dst / 2], t,
                        (dst & 1 ? 0 : 32), 32);
    gen_update_fprs_dirty(dc, dst);
}
174 
/* Provide a fresh i32 temp to receive a single-precision result. */
static TCGv_i32 gen_dest_fpr_F(DisasContext *dc)
{
    return tcg_temp_new_i32();
}
179 
/* Return the 64-bit backing register holding double-precision register SRC. */
static TCGv_i64 gen_load_fpr_D(DisasContext *dc, unsigned int src)
{
    src = DFPREG(src);
    return cpu_fpr[src / 2];
}
185 
/* Write V to double-precision FP register DST and mark FPRS dirty. */
static void gen_store_fpr_D(DisasContext *dc, unsigned int dst, TCGv_i64 v)
{
    dst = DFPREG(dst);
    tcg_gen_mov_i64(cpu_fpr[dst / 2], v);
    gen_update_fprs_dirty(dc, dst);
}
192 
/*
 * Destination for a double-precision result: the backing register
 * itself (caller is expected to mark FPRS dirty after the store).
 */
static TCGv_i64 gen_dest_fpr_D(DisasContext *dc, unsigned int dst)
{
    return cpu_fpr[DFPREG(dst) / 2];
}
197 
/* Copy the quad FP register pair starting at SRC into env scratch qt0. */
static void gen_op_load_fpr_QT0(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
205 
/* Copy the quad FP register pair starting at SRC into env scratch qt1. */
static void gen_op_load_fpr_QT1(unsigned int src)
{
    tcg_gen_st_i64(cpu_fpr[src / 2], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_st_i64(cpu_fpr[src/2 + 1], tcg_env, offsetof(CPUSPARCState, qt1) +
                   offsetof(CPU_QuadU, ll.lower));
}
213 
/* Copy env scratch qt0 back into the quad FP register pair at DST. */
static void gen_op_store_QT0_fpr(unsigned int dst)
{
    tcg_gen_ld_i64(cpu_fpr[dst / 2], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.upper));
    tcg_gen_ld_i64(cpu_fpr[dst/2 + 1], tcg_env, offsetof(CPUSPARCState, qt0) +
                   offsetof(CPU_QuadU, ll.lower));
}
221 
/*
 * Write the pair (V1 = upper half, V2 = lower half) to quad FP
 * register DST and mark FPRS dirty.
 */
static void gen_store_fpr_Q(DisasContext *dc, unsigned int dst,
                            TCGv_i64 v1, TCGv_i64 v2)
{
    dst = QFPREG(dst);

    tcg_gen_mov_i64(cpu_fpr[dst / 2], v1);
    tcg_gen_mov_i64(cpu_fpr[dst / 2 + 1], v2);
    gen_update_fprs_dirty(dc, dst);
}
231 
232 #ifdef TARGET_SPARC64
/* Upper 64-bit half of quad FP register SRC. */
static TCGv_i64 gen_load_fpr_Q0(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2];
}
238 
/* Lower 64-bit half of quad FP register SRC. */
static TCGv_i64 gen_load_fpr_Q1(DisasContext *dc, unsigned int src)
{
    src = QFPREG(src);
    return cpu_fpr[src / 2 + 1];
}
244 
/* Quad FP register-to-register move: RD <- RS; marks FPRS dirty. */
static void gen_move_Q(DisasContext *dc, unsigned int rd, unsigned int rs)
{
    rd = QFPREG(rd);
    rs = QFPREG(rs);

    tcg_gen_mov_i64(cpu_fpr[rd / 2], cpu_fpr[rs / 2]);
    tcg_gen_mov_i64(cpu_fpr[rd / 2 + 1], cpu_fpr[rs / 2 + 1]);
    gen_update_fprs_dirty(dc, rd);
}
254 #endif
255 
256 /* moves */
257 #ifdef CONFIG_USER_ONLY
258 #define supervisor(dc) 0
259 #ifdef TARGET_SPARC64
260 #define hypervisor(dc) 0
261 #endif
262 #else
263 #ifdef TARGET_SPARC64
264 #define hypervisor(dc) (dc->hypervisor)
265 #define supervisor(dc) (dc->supervisor | dc->hypervisor)
266 #else
267 #define supervisor(dc) (dc->supervisor)
268 #endif
269 #endif
270 
271 #if !defined(TARGET_SPARC64)
272 # define AM_CHECK(dc)  false
273 #elif defined(TARGET_ABI32)
274 # define AM_CHECK(dc)  true
275 #elif defined(CONFIG_USER_ONLY)
276 # define AM_CHECK(dc)  false
277 #else
278 # define AM_CHECK(dc)  ((dc)->address_mask_32bit)
279 #endif
280 
/*
 * Truncate ADDR to 32 bits in place when the 32-bit address mask is in
 * effect for this context (see AM_CHECK); otherwise emit nothing.
 */
static void gen_address_mask(DisasContext *dc, TCGv addr)
{
    if (AM_CHECK(dc)) {
        tcg_gen_andi_tl(addr, addr, 0xffffffffULL);
    }
}
287 
288 static target_ulong address_mask_i(DisasContext *dc, target_ulong addr)
289 {
290     return AM_CHECK(dc) ? (uint32_t)addr : addr;
291 }
292 
293 static TCGv gen_load_gpr(DisasContext *dc, int reg)
294 {
295     if (reg > 0) {
296         assert(reg < 32);
297         return cpu_regs[reg];
298     } else {
299         TCGv t = tcg_temp_new();
300         tcg_gen_movi_tl(t, 0);
301         return t;
302     }
303 }
304 
305 static void gen_store_gpr(DisasContext *dc, int reg, TCGv v)
306 {
307     if (reg > 0) {
308         assert(reg < 32);
309         tcg_gen_mov_tl(cpu_regs[reg], v);
310     }
311 }
312 
313 static TCGv gen_dest_gpr(DisasContext *dc, int reg)
314 {
315     if (reg > 0) {
316         assert(reg < 32);
317         return cpu_regs[reg];
318     } else {
319         return tcg_temp_new();
320     }
321 }
322 
/* True when both PC and NPC can be reached by direct goto_tb chaining. */
static bool use_goto_tb(DisasContext *s, target_ulong pc, target_ulong npc)
{
    return translator_use_goto_tb(&s->base, pc) &&
           translator_use_goto_tb(&s->base, npc);
}
328 
/*
 * End the TB by jumping to (PC, NPC): a direct chained jump through
 * goto_tb slot TB_NUM when allowed, otherwise an indirect jump via
 * lookup_and_goto_ptr.
 */
static void gen_goto_tb(DisasContext *s, int tb_num,
                        target_ulong pc, target_ulong npc)
{
    if (use_goto_tb(s, pc, npc))  {
        /* jump to same page: we can use a direct jump */
        tcg_gen_goto_tb(tb_num);
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_exit_tb(s->base.tb, tb_num);
    } else {
        /* jump to another page: we can use an indirect jump */
        tcg_gen_movi_tl(cpu_pc, pc);
        tcg_gen_movi_tl(cpu_npc, npc);
        tcg_gen_lookup_and_goto_ptr();
    }
}
345 
// XXX suboptimal
/* Extract PSR.N (negative) from i32 SRC into REG as 0 or 1. */
static void gen_mov_reg_N(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_NEG_SHIFT, 1);
}
352 
/* Extract PSR.Z (zero) from i32 SRC into REG as 0 or 1. */
static void gen_mov_reg_Z(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_ZERO_SHIFT, 1);
}
358 
/* Extract PSR.V (overflow) from i32 SRC into REG as 0 or 1. */
static void gen_mov_reg_V(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_OVF_SHIFT, 1);
}
364 
/* Extract PSR.C (carry) from i32 SRC into REG as 0 or 1. */
static void gen_mov_reg_C(TCGv reg, TCGv_i32 src)
{
    tcg_gen_extu_i32_tl(reg, src);
    tcg_gen_extract_tl(reg, reg, PSR_CARRY_SHIFT, 1);
}
370 
/*
 * dst = src1 + src2, latching operands and result into cpu_cc_src,
 * cpu_cc_src2 and cpu_cc_dst so flags can be computed lazily later.
 * The caller is responsible for updating cc_op.
 */
static void gen_op_add_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
378 
/*
 * Recover the 32-bit carry left by a previous add whose operands and
 * result are latched in cpu_cc_*: carry = (uint32_t)dst < (uint32_t)src.
 * Returns a fresh i32 temp holding 0 or 1.
 */
static TCGv_i32 gen_add32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous add: (dst < src)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_dst);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src);
#else
    cc_src1_32 = cpu_cc_dst;
    cc_src2_32 = cpu_cc_src;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
399 
/*
 * Recover the 32-bit borrow left by a previous subtract whose operands
 * are latched in cpu_cc_*: borrow = (uint32_t)src1 < (uint32_t)src2.
 * Returns a fresh i32 temp holding 0 or 1.
 */
static TCGv_i32 gen_sub32_carry32(void)
{
    TCGv_i32 carry_32, cc_src1_32, cc_src2_32;

    /* Carry is computed from a previous borrow: (src1 < src2)  */
#if TARGET_LONG_BITS == 64
    cc_src1_32 = tcg_temp_new_i32();
    cc_src2_32 = tcg_temp_new_i32();
    tcg_gen_extrl_i64_i32(cc_src1_32, cpu_cc_src);
    tcg_gen_extrl_i64_i32(cc_src2_32, cpu_cc_src2);
#else
    cc_src1_32 = cpu_cc_src;
    cc_src2_32 = cpu_cc_src2;
#endif

    carry_32 = tcg_temp_new_i32();
    tcg_gen_setcond_i32(TCG_COND_LTU, carry_32, cc_src1_32, cc_src2_32);

    return carry_32;
}
420 
/*
 * ADDX/ADDXcc: dst = src1 + src2 + icc.C.  The incoming carry is
 * recovered according to the cached dc->cc_op, falling back to the
 * compute_C_icc helper only when the previous flag-setting operation
 * is unknown.  When UPDATE_CC, operands/result are latched and cc_op
 * becomes CC_OP_ADDX.
 */
static void gen_op_addx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain ADD.  */
        if (update_cc) {
            gen_op_add_cc(dst, src1, src2);
        } else {
            tcg_gen_add_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               an ADD2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_add2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto add_done;
        }
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

    /* Widen the 32-bit carry to the target word size if needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_add_tl(dst, src1, src2);
    tcg_gen_add_tl(dst, dst, carry);

 add_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADDX);
        dc->cc_op = CC_OP_ADDX;
    }
}
485 
/*
 * dst = src1 - src2, latching operands and result into cpu_cc_* for
 * lazy flag computation.  The caller is responsible for updating cc_op.
 */
static void gen_op_sub_cc(TCGv dst, TCGv src1, TCGv src2)
{
    tcg_gen_mov_tl(cpu_cc_src, src1);
    tcg_gen_mov_tl(cpu_cc_src2, src2);
    tcg_gen_sub_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);
    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
493 
/*
 * SUBX/SUBXcc: dst = src1 - src2 - icc.C.  Mirror image of
 * gen_op_addx_int: the borrow is recovered from the cached cc state,
 * with a helper call as last resort.  When UPDATE_CC, cc_op becomes
 * CC_OP_SUBX.
 */
static void gen_op_subx_int(DisasContext *dc, TCGv dst, TCGv src1,
                            TCGv src2, int update_cc)
{
    TCGv_i32 carry_32;
    TCGv carry;

    switch (dc->cc_op) {
    case CC_OP_DIV:
    case CC_OP_LOGIC:
        /* Carry is known to be zero.  Fall back to plain SUB.  */
        if (update_cc) {
            gen_op_sub_cc(dst, src1, src2);
        } else {
            tcg_gen_sub_tl(dst, src1, src2);
        }
        return;

    case CC_OP_ADD:
    case CC_OP_TADD:
    case CC_OP_TADDTV:
        carry_32 = gen_add32_carry32();
        break;

    case CC_OP_SUB:
    case CC_OP_TSUB:
    case CC_OP_TSUBTV:
        if (TARGET_LONG_BITS == 32) {
            /* We can re-use the host's hardware carry generation by using
               a SUB2 opcode.  We discard the low part of the output.
               Ideally we'd combine this operation with the add that
               generated the carry in the first place.  */
            carry = tcg_temp_new();
            tcg_gen_sub2_tl(carry, dst, cpu_cc_src, src1, cpu_cc_src2, src2);
            goto sub_done;
        }
        carry_32 = gen_sub32_carry32();
        break;

    default:
        /* We need external help to produce the carry.  */
        carry_32 = tcg_temp_new_i32();
        gen_helper_compute_C_icc(carry_32, tcg_env);
        break;
    }

    /* Widen the 32-bit borrow to the target word size if needed.  */
#if TARGET_LONG_BITS == 64
    carry = tcg_temp_new();
    tcg_gen_extu_i32_i64(carry, carry_32);
#else
    carry = carry_32;
#endif

    tcg_gen_sub_tl(dst, src1, src2);
    tcg_gen_sub_tl(dst, dst, carry);

 sub_done:
    if (update_cc) {
        tcg_gen_mov_tl(cpu_cc_src, src1);
        tcg_gen_mov_tl(cpu_cc_src2, src2);
        tcg_gen_mov_tl(cpu_cc_dst, dst);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUBX);
        dc->cc_op = CC_OP_SUBX;
    }
}
558 
/*
 * MULScc: one step of the SPARC multiply-step instruction.  Shifts the
 * Y register, conditionally adds src2 depending on Y bit 0, and folds
 * N^V of the current PSR into the shifted src1.  Operands and result
 * are latched into cpu_cc_*; the caller updates cc_op.
 */
static void gen_op_mulscc(TCGv dst, TCGv src1, TCGv src2)
{
    TCGv r_temp, zero, t0;

    r_temp = tcg_temp_new();
    t0 = tcg_temp_new();

    /* old op:
    if (!(env->y & 1))
        T1 = 0;
    */
    zero = tcg_constant_tl(0);
    tcg_gen_andi_tl(cpu_cc_src, src1, 0xffffffff);
    tcg_gen_andi_tl(r_temp, cpu_y, 0x1);
    tcg_gen_andi_tl(cpu_cc_src2, src2, 0xffffffff);
    tcg_gen_movcond_tl(TCG_COND_EQ, cpu_cc_src2, r_temp, zero,
                       zero, cpu_cc_src2);

    // b2 = T0 & 1;
    // env->y = (b2 << 31) | (env->y >> 1);
    tcg_gen_extract_tl(t0, cpu_y, 1, 31);
    tcg_gen_deposit_tl(cpu_y, t0, cpu_cc_src, 31, 1);

    // b1 = N ^ V;
    gen_mov_reg_N(t0, cpu_psr);
    gen_mov_reg_V(r_temp, cpu_psr);
    tcg_gen_xor_tl(t0, t0, r_temp);

    // T0 = (b1 << 31) | (T0 >> 1);
    // src1 = T0;
    tcg_gen_shli_tl(t0, t0, 31);
    tcg_gen_shri_tl(cpu_cc_src, cpu_cc_src, 1);
    tcg_gen_or_tl(cpu_cc_src, cpu_cc_src, t0);

    tcg_gen_add_tl(cpu_cc_dst, cpu_cc_src, cpu_cc_src2);

    tcg_gen_mov_tl(dst, cpu_cc_dst);
}
597 
/*
 * 32x32 -> 64-bit multiply for UMUL/SMUL.  The high 32 bits of the
 * product go to the Y register; DST receives the low 32 bits on
 * 32-bit targets and the full 64-bit product on sparc64.  SIGN_EXT
 * selects signed vs unsigned extension of the truncated operands.
 */
static void gen_op_multiply(TCGv dst, TCGv src1, TCGv src2, int sign_ext)
{
#if TARGET_LONG_BITS == 32
    if (sign_ext) {
        tcg_gen_muls2_tl(dst, cpu_y, src1, src2);
    } else {
        tcg_gen_mulu2_tl(dst, cpu_y, src1, src2);
    }
#else
    TCGv t0 = tcg_temp_new_i64();
    TCGv t1 = tcg_temp_new_i64();

    if (sign_ext) {
        tcg_gen_ext32s_i64(t0, src1);
        tcg_gen_ext32s_i64(t1, src2);
    } else {
        tcg_gen_ext32u_i64(t0, src1);
        tcg_gen_ext32u_i64(t1, src2);
    }

    tcg_gen_mul_i64(dst, t0, t1);
    tcg_gen_shri_i64(cpu_y, dst, 32);
#endif
}
622 
/* UMUL: unsigned 32x32 multiply (see gen_op_multiply). */
static void gen_op_umul(TCGv dst, TCGv src1, TCGv src2)
{
    /* zero-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 0);
}
628 
/* SMUL: signed 32x32 multiply (see gen_op_multiply). */
static void gen_op_smul(TCGv dst, TCGv src1, TCGv src2)
{
    /* sign-extend truncated operands before multiplication */
    gen_op_multiply(dst, src1, src2, 1);
}
634 
/* ba (branch always): condition is constant 1. */
static void gen_op_eval_ba(TCGv dst)
{
    tcg_gen_movi_tl(dst, 1);
}
640 
/* be (branch on equal): Z */
static void gen_op_eval_be(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
}
646 
/* ble (branch on less or equal): Z | (N ^ V) */
static void gen_op_eval_ble(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_N(t0, src);
    gen_mov_reg_V(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
    gen_mov_reg_Z(t0, src);
    tcg_gen_or_tl(dst, dst, t0);
}
657 
/* bl (branch on less): N ^ V */
static void gen_op_eval_bl(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_V(t0, src);
    gen_mov_reg_N(dst, src);
    tcg_gen_xor_tl(dst, dst, t0);
}
666 
/* bleu (branch on less or equal, unsigned): C | Z */
static void gen_op_eval_bleu(TCGv dst, TCGv_i32 src)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_Z(t0, src);
    gen_mov_reg_C(dst, src);
    tcg_gen_or_tl(dst, dst, t0);
}
675 
/* bcs (branch on carry set): C */
static void gen_op_eval_bcs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
}
681 
/* bvs (branch on overflow set): V */
static void gen_op_eval_bvs(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
}
687 
/* bn (branch never): condition is constant 0. */
static void gen_op_eval_bn(TCGv dst)
{
    tcg_gen_movi_tl(dst, 0);
}
693 
/* bneg (branch on negative): N */
static void gen_op_eval_bneg(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
}
699 
/* bne (branch on not equal): !Z */
static void gen_op_eval_bne(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_Z(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
706 
/* bg (branch on greater): !(Z | (N ^ V)) — complement of ble. */
static void gen_op_eval_bg(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_ble(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
713 
/* bge (branch on greater or equal): !(N ^ V) — complement of bl. */
static void gen_op_eval_bge(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bl(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
720 
/* bgu (branch on greater, unsigned): !(C | Z) — complement of bleu. */
static void gen_op_eval_bgu(TCGv dst, TCGv_i32 src)
{
    gen_op_eval_bleu(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
727 
/* bcc (branch on carry clear): !C */
static void gen_op_eval_bcc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_C(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
734 
/* bpos (branch on positive): !N */
static void gen_op_eval_bpos(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_N(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
741 
/* bvc (branch on overflow clear): !V */
static void gen_op_eval_bvc(TCGv dst, TCGv_i32 src)
{
    gen_mov_reg_V(dst, src);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
748 
749 /*
750   FPSR bit field FCC1 | FCC0:
751    0 =
752    1 <
753    2 >
754    3 unordered
755 */
/* Extract bit FCC0 of the FP condition field at FCC_OFFSET from FSR value SRC. */
static void gen_mov_reg_FCC0(TCGv reg, TCGv src,
                                    unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC0_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
762 
/* Extract bit FCC1 of the FP condition field at FCC_OFFSET from FSR value SRC. */
static void gen_mov_reg_FCC1(TCGv reg, TCGv src, unsigned int fcc_offset)
{
    tcg_gen_shri_tl(reg, src, FSR_FCC1_SHIFT + fcc_offset);
    tcg_gen_andi_tl(reg, reg, 0x1);
}
768 
/* fbne: fcc != 0 (unequal) -> FCC0 | FCC1 */
static void gen_op_eval_fbne(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
}
777 
/* fblg: fcc == 1 or 2 (less or greater) -> FCC0 ^ FCC1 */
static void gen_op_eval_fblg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
}
786 
/* fbul: fcc == 1 or 3 (unordered or less) -> FCC0 */
static void gen_op_eval_fbul(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
}
792 
/* fbl: fcc == 1 (less) -> FCC0 & !FCC1 */
static void gen_op_eval_fbl(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
}
801 
/* fbug: fcc == 2 or 3 (unordered or greater) -> FCC1 */
static void gen_op_eval_fbug(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
}
807 
/* fbg: fcc == 2 (greater) -> !FCC0 & FCC1 */
static void gen_op_eval_fbg(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
}
816 
/* fbu: fcc == 3 (unordered) -> FCC0 & FCC1 */
static void gen_op_eval_fbu(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
}
825 
/* fbe: fcc == 0 (equal) -> !(FCC0 | FCC1) */
static void gen_op_eval_fbe(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_or_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
835 
/* fbue: fcc == 0 or 3 (unordered or equal) -> !(FCC0 ^ FCC1) */
static void gen_op_eval_fbue(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_xor_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
845 
/* fbge: fcc == 0 or 2 (greater or equal) -> !FCC0 */
static void gen_op_eval_fbge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
852 
/* fbuge: fcc != 1 (unordered, greater or equal) -> !(FCC0 & !FCC1) */
static void gen_op_eval_fbuge(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
862 
/* fble: fcc == 0 or 1 (less or equal) -> !FCC1 */
static void gen_op_eval_fble(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    gen_mov_reg_FCC1(dst, src, fcc_offset);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
869 
/* fbule: fcc != 2 (unordered, less or equal) -> !(!FCC0 & FCC1) */
static void gen_op_eval_fbule(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_andc_tl(dst, t0, dst);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
879 
/* fbo: fcc != 3 (ordered) -> !(FCC0 & FCC1) */
static void gen_op_eval_fbo(TCGv dst, TCGv src, unsigned int fcc_offset)
{
    TCGv t0 = tcg_temp_new();
    gen_mov_reg_FCC0(dst, src, fcc_offset);
    gen_mov_reg_FCC1(t0, src, fcc_offset);
    tcg_gen_and_tl(dst, dst, t0);
    tcg_gen_xori_tl(dst, dst, 0x1);
}
889 
/*
 * Two-way conditional branch epilogue: if R_COND != 0 continue at PC1,
 * otherwise at PC2, chaining each side through goto_tb slots 0 and 1.
 */
static void gen_branch2(DisasContext *dc, target_ulong pc1,
                        target_ulong pc2, TCGv r_cond)
{
    TCGLabel *l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, r_cond, 0, l1);

    gen_goto_tb(dc, 0, pc1, pc1 + 4);

    gen_set_label(l1);
    gen_goto_tb(dc, 1, pc2, pc2 + 4);
}
902 
/* Resolve a JUMP_PC npc: cpu_npc = cpu_cond ? jump_pc[0] : jump_pc[1]. */
static void gen_generic_branch(DisasContext *dc)
{
    TCGv npc0 = tcg_constant_tl(dc->jump_pc[0]);
    TCGv npc1 = tcg_constant_tl(dc->jump_pc[1]);
    TCGv zero = tcg_constant_tl(0);

    tcg_gen_movcond_tl(TCG_COND_NE, cpu_npc, cpu_cond, zero, npc0, npc1);
}
911 
/* call this function before using the condition register as it may
   have been set for a jump */
static void flush_cond(DisasContext *dc)
{
    if (dc->npc == JUMP_PC) {
        /* Materialize the pending two-way npc into cpu_npc. */
        gen_generic_branch(dc);
        dc->npc = DYNAMIC_PC_LOOKUP;
    }
}
921 
/*
 * Make cpu_npc hold the architectural next-PC.  A symbolic dc->npc
 * (low bits set) is either resolved (JUMP_PC) or already live in
 * cpu_npc (DYNAMIC_PC*); a concrete value is stored directly.
 */
static void save_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            dc->npc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        tcg_gen_movi_tl(cpu_npc, dc->npc);
    }
}
940 
/*
 * Materialize the lazily-tracked condition codes into env via the
 * compute_psr helper, unless they are already up to date.
 */
static void update_psr(DisasContext *dc)
{
    if (dc->cc_op != CC_OP_FLAGS) {
        dc->cc_op = CC_OP_FLAGS;
        gen_helper_compute_psr(tcg_env);
    }
}
948 
/* Write dc->pc and dc->npc back to the CPU state, e.g. before a trap. */
static void save_state(DisasContext *dc)
{
    tcg_gen_movi_tl(cpu_pc, dc->pc);
    save_npc(dc);
}
954 
/* Raise exception WHICH at the current insn and end the TB. */
static void gen_exception(DisasContext *dc, int which)
{
    save_state(dc);
    gen_helper_raise_exception(tcg_env, tcg_constant_i32(which));
    dc->base.is_jmp = DISAS_NORETURN;
}
961 
962 static TCGLabel *delay_exceptionv(DisasContext *dc, TCGv_i32 excp)
963 {
964     DisasDelayException *e = g_new0(DisasDelayException, 1);
965 
966     e->next = dc->delay_excp_list;
967     dc->delay_excp_list = e;
968 
969     e->lab = gen_new_label();
970     e->excp = excp;
971     e->pc = dc->pc;
972     /* Caller must have used flush_cond before branch. */
973     assert(e->npc != JUMP_PC);
974     e->npc = dc->npc;
975 
976     return e->lab;
977 }
978 
/* Convenience wrapper: deferred exception with a constant trap number. */
static TCGLabel *delay_exception(DisasContext *dc, int excp)
{
    return delay_exceptionv(dc, tcg_constant_i32(excp));
}
983 
/*
 * Emit an alignment check: if ADDR has any bits of MASK set, branch to
 * a deferred TT_UNALIGNED exception recorded for the current insn.
 */
static void gen_check_align(DisasContext *dc, TCGv addr, int mask)
{
    TCGv t = tcg_temp_new();
    TCGLabel *lab;

    tcg_gen_andi_tl(t, addr, mask);

    flush_cond(dc);
    lab = delay_exception(dc, TT_UNALIGNED);
    tcg_gen_brcondi_tl(TCG_COND_NE, t, 0, lab);
}
995 
/*
 * Advance pc <- npc, handling symbolic npc values: JUMP_PC is resolved
 * first, DYNAMIC_PC* is copied at runtime, and a concrete npc is just
 * propagated in the translator state.
 */
static void gen_mov_pc_npc(DisasContext *dc)
{
    if (dc->npc & 3) {
        switch (dc->npc) {
        case JUMP_PC:
            gen_generic_branch(dc);
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = DYNAMIC_PC_LOOKUP;
            break;
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            tcg_gen_mov_tl(cpu_pc, cpu_npc);
            dc->pc = dc->npc;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        dc->pc = dc->npc;
    }
}
1017 
/* Runtime advance to the next insn: pc <- npc, npc <- npc + 4. */
static void gen_op_next_insn(void)
{
    tcg_gen_mov_tl(cpu_pc, cpu_npc);
    tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
}
1023 
1024 static void gen_compare(DisasCompare *cmp, bool xcc, unsigned int cond,
1025                         DisasContext *dc)
1026 {
1027     static int subcc_cond[16] = {
1028         TCG_COND_NEVER,
1029         TCG_COND_EQ,
1030         TCG_COND_LE,
1031         TCG_COND_LT,
1032         TCG_COND_LEU,
1033         TCG_COND_LTU,
1034         -1, /* neg */
1035         -1, /* overflow */
1036         TCG_COND_ALWAYS,
1037         TCG_COND_NE,
1038         TCG_COND_GT,
1039         TCG_COND_GE,
1040         TCG_COND_GTU,
1041         TCG_COND_GEU,
1042         -1, /* pos */
1043         -1, /* no overflow */
1044     };
1045 
1046     static int logic_cond[16] = {
1047         TCG_COND_NEVER,
1048         TCG_COND_EQ,     /* eq:  Z */
1049         TCG_COND_LE,     /* le:  Z | (N ^ V) -> Z | N */
1050         TCG_COND_LT,     /* lt:  N ^ V -> N */
1051         TCG_COND_EQ,     /* leu: C | Z -> Z */
1052         TCG_COND_NEVER,  /* ltu: C -> 0 */
1053         TCG_COND_LT,     /* neg: N */
1054         TCG_COND_NEVER,  /* vs:  V -> 0 */
1055         TCG_COND_ALWAYS,
1056         TCG_COND_NE,     /* ne:  !Z */
1057         TCG_COND_GT,     /* gt:  !(Z | (N ^ V)) -> !(Z | N) */
1058         TCG_COND_GE,     /* ge:  !(N ^ V) -> !N */
1059         TCG_COND_NE,     /* gtu: !(C | Z) -> !Z */
1060         TCG_COND_ALWAYS, /* geu: !C -> 1 */
1061         TCG_COND_GE,     /* pos: !N */
1062         TCG_COND_ALWAYS, /* vc:  !V -> 1 */
1063     };
1064 
1065     TCGv_i32 r_src;
1066     TCGv r_dst;
1067 
1068 #ifdef TARGET_SPARC64
1069     if (xcc) {
1070         r_src = cpu_xcc;
1071     } else {
1072         r_src = cpu_psr;
1073     }
1074 #else
1075     r_src = cpu_psr;
1076 #endif
1077 
1078     switch (dc->cc_op) {
1079     case CC_OP_LOGIC:
1080         cmp->cond = logic_cond[cond];
1081     do_compare_dst_0:
1082         cmp->is_bool = false;
1083         cmp->c2 = tcg_constant_tl(0);
1084 #ifdef TARGET_SPARC64
1085         if (!xcc) {
1086             cmp->c1 = tcg_temp_new();
1087             tcg_gen_ext32s_tl(cmp->c1, cpu_cc_dst);
1088             break;
1089         }
1090 #endif
1091         cmp->c1 = cpu_cc_dst;
1092         break;
1093 
1094     case CC_OP_SUB:
1095         switch (cond) {
1096         case 6:  /* neg */
1097         case 14: /* pos */
1098             cmp->cond = (cond == 6 ? TCG_COND_LT : TCG_COND_GE);
1099             goto do_compare_dst_0;
1100 
1101         case 7: /* overflow */
1102         case 15: /* !overflow */
1103             goto do_dynamic;
1104 
1105         default:
1106             cmp->cond = subcc_cond[cond];
1107             cmp->is_bool = false;
1108 #ifdef TARGET_SPARC64
1109             if (!xcc) {
1110                 /* Note that sign-extension works for unsigned compares as
1111                    long as both operands are sign-extended.  */
1112                 cmp->c1 = tcg_temp_new();
1113                 cmp->c2 = tcg_temp_new();
1114                 tcg_gen_ext32s_tl(cmp->c1, cpu_cc_src);
1115                 tcg_gen_ext32s_tl(cmp->c2, cpu_cc_src2);
1116                 break;
1117             }
1118 #endif
1119             cmp->c1 = cpu_cc_src;
1120             cmp->c2 = cpu_cc_src2;
1121             break;
1122         }
1123         break;
1124 
1125     default:
1126     do_dynamic:
1127         gen_helper_compute_psr(tcg_env);
1128         dc->cc_op = CC_OP_FLAGS;
1129         /* FALLTHRU */
1130 
1131     case CC_OP_FLAGS:
1132         /* We're going to generate a boolean result.  */
1133         cmp->cond = TCG_COND_NE;
1134         cmp->is_bool = true;
1135         cmp->c1 = r_dst = tcg_temp_new();
1136         cmp->c2 = tcg_constant_tl(0);
1137 
1138         switch (cond) {
1139         case 0x0:
1140             gen_op_eval_bn(r_dst);
1141             break;
1142         case 0x1:
1143             gen_op_eval_be(r_dst, r_src);
1144             break;
1145         case 0x2:
1146             gen_op_eval_ble(r_dst, r_src);
1147             break;
1148         case 0x3:
1149             gen_op_eval_bl(r_dst, r_src);
1150             break;
1151         case 0x4:
1152             gen_op_eval_bleu(r_dst, r_src);
1153             break;
1154         case 0x5:
1155             gen_op_eval_bcs(r_dst, r_src);
1156             break;
1157         case 0x6:
1158             gen_op_eval_bneg(r_dst, r_src);
1159             break;
1160         case 0x7:
1161             gen_op_eval_bvs(r_dst, r_src);
1162             break;
1163         case 0x8:
1164             gen_op_eval_ba(r_dst);
1165             break;
1166         case 0x9:
1167             gen_op_eval_bne(r_dst, r_src);
1168             break;
1169         case 0xa:
1170             gen_op_eval_bg(r_dst, r_src);
1171             break;
1172         case 0xb:
1173             gen_op_eval_bge(r_dst, r_src);
1174             break;
1175         case 0xc:
1176             gen_op_eval_bgu(r_dst, r_src);
1177             break;
1178         case 0xd:
1179             gen_op_eval_bcc(r_dst, r_src);
1180             break;
1181         case 0xe:
1182             gen_op_eval_bpos(r_dst, r_src);
1183             break;
1184         case 0xf:
1185             gen_op_eval_bvc(r_dst, r_src);
1186             break;
1187         }
1188         break;
1189     }
1190 }
1191 
/*
 * Fill *cmp with a comparison equivalent to floating-point condition
 * COND evaluated against FSR condition-code field fcc[CC].  Always
 * produces a boolean in a fresh temp (cmp->is_bool).
 */
static void gen_fcompare(DisasCompare *cmp, unsigned int cc, unsigned int cond)
{
    unsigned int offset;
    TCGv r_dst;

    /* For now we still generate a straight boolean result.  */
    cmp->cond = TCG_COND_NE;
    cmp->is_bool = true;
    cmp->c1 = r_dst = tcg_temp_new();
    cmp->c2 = tcg_constant_tl(0);

    /* Bit offset of fcc[cc] relative to fcc0; the eval helpers shift
       cpu_fsr by this amount before testing.  fcc0 lives at bit 10,
       fcc1..fcc3 at bits 32/34/36 — hence the "- 10" corrections.  */
    switch (cc) {
    default:
    case 0x0:
        offset = 0;
        break;
    case 0x1:
        offset = 32 - 10;
        break;
    case 0x2:
        offset = 34 - 10;
        break;
    case 0x3:
        offset = 36 - 10;
        break;
    }

    /* Dispatch on the 4-bit FP branch condition.  */
    switch (cond) {
    case 0x0:
        gen_op_eval_bn(r_dst);
        break;
    case 0x1:
        gen_op_eval_fbne(r_dst, cpu_fsr, offset);
        break;
    case 0x2:
        gen_op_eval_fblg(r_dst, cpu_fsr, offset);
        break;
    case 0x3:
        gen_op_eval_fbul(r_dst, cpu_fsr, offset);
        break;
    case 0x4:
        gen_op_eval_fbl(r_dst, cpu_fsr, offset);
        break;
    case 0x5:
        gen_op_eval_fbug(r_dst, cpu_fsr, offset);
        break;
    case 0x6:
        gen_op_eval_fbg(r_dst, cpu_fsr, offset);
        break;
    case 0x7:
        gen_op_eval_fbu(r_dst, cpu_fsr, offset);
        break;
    case 0x8:
        gen_op_eval_ba(r_dst);
        break;
    case 0x9:
        gen_op_eval_fbe(r_dst, cpu_fsr, offset);
        break;
    case 0xa:
        gen_op_eval_fbue(r_dst, cpu_fsr, offset);
        break;
    case 0xb:
        gen_op_eval_fbge(r_dst, cpu_fsr, offset);
        break;
    case 0xc:
        gen_op_eval_fbuge(r_dst, cpu_fsr, offset);
        break;
    case 0xd:
        gen_op_eval_fble(r_dst, cpu_fsr, offset);
        break;
    case 0xe:
        gen_op_eval_fbule(r_dst, cpu_fsr, offset);
        break;
    case 0xf:
        gen_op_eval_fbo(r_dst, cpu_fsr, offset);
        break;
    }
}
1270 
/*
 * TCG conditions for the register-based conditions (rcond field of
 * MOVr/BPr style instructions), indexed by rcond.  NOTE: the table
 * stores the INVERSE of each condition; gen_compare_reg() applies
 * tcg_invert_cond() to recover the sense actually tested.  Entries
 * 0 and 4 are reserved encodings.
 */
static const TCGCond gen_tcg_cond_reg[8] = {
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_NE,
    TCG_COND_GT,
    TCG_COND_GE,
    TCG_COND_NEVER,  /* reserved */
    TCG_COND_EQ,
    TCG_COND_LE,
    TCG_COND_LT,
};
1282 
1283 static void gen_compare_reg(DisasCompare *cmp, int cond, TCGv r_src)
1284 {
1285     cmp->cond = tcg_invert_cond(gen_tcg_cond_reg[cond]);
1286     cmp->is_bool = false;
1287     cmp->c1 = r_src;
1288     cmp->c2 = tcg_constant_tl(0);
1289 }
1290 
1291 #ifdef TARGET_SPARC64
1292 static void gen_op_fcmps(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1293 {
1294     switch (fccno) {
1295     case 0:
1296         gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
1297         break;
1298     case 1:
1299         gen_helper_fcmps_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1300         break;
1301     case 2:
1302         gen_helper_fcmps_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1303         break;
1304     case 3:
1305         gen_helper_fcmps_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1306         break;
1307     }
1308 }
1309 
1310 static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1311 {
1312     switch (fccno) {
1313     case 0:
1314         gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
1315         break;
1316     case 1:
1317         gen_helper_fcmpd_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1318         break;
1319     case 2:
1320         gen_helper_fcmpd_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1321         break;
1322     case 3:
1323         gen_helper_fcmpd_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1324         break;
1325     }
1326 }
1327 
1328 static void gen_op_fcmpq(int fccno)
1329 {
1330     switch (fccno) {
1331     case 0:
1332         gen_helper_fcmpq(cpu_fsr, tcg_env);
1333         break;
1334     case 1:
1335         gen_helper_fcmpq_fcc1(cpu_fsr, tcg_env);
1336         break;
1337     case 2:
1338         gen_helper_fcmpq_fcc2(cpu_fsr, tcg_env);
1339         break;
1340     case 3:
1341         gen_helper_fcmpq_fcc3(cpu_fsr, tcg_env);
1342         break;
1343     }
1344 }
1345 
1346 static void gen_op_fcmpes(int fccno, TCGv_i32 r_rs1, TCGv_i32 r_rs2)
1347 {
1348     switch (fccno) {
1349     case 0:
1350         gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
1351         break;
1352     case 1:
1353         gen_helper_fcmpes_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1354         break;
1355     case 2:
1356         gen_helper_fcmpes_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1357         break;
1358     case 3:
1359         gen_helper_fcmpes_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1360         break;
1361     }
1362 }
1363 
1364 static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
1365 {
1366     switch (fccno) {
1367     case 0:
1368         gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
1369         break;
1370     case 1:
1371         gen_helper_fcmped_fcc1(cpu_fsr, tcg_env, r_rs1, r_rs2);
1372         break;
1373     case 2:
1374         gen_helper_fcmped_fcc2(cpu_fsr, tcg_env, r_rs1, r_rs2);
1375         break;
1376     case 3:
1377         gen_helper_fcmped_fcc3(cpu_fsr, tcg_env, r_rs1, r_rs2);
1378         break;
1379     }
1380 }
1381 
1382 static void gen_op_fcmpeq(int fccno)
1383 {
1384     switch (fccno) {
1385     case 0:
1386         gen_helper_fcmpeq(cpu_fsr, tcg_env);
1387         break;
1388     case 1:
1389         gen_helper_fcmpeq_fcc1(cpu_fsr, tcg_env);
1390         break;
1391     case 2:
1392         gen_helper_fcmpeq_fcc2(cpu_fsr, tcg_env);
1393         break;
1394     case 3:
1395         gen_helper_fcmpeq_fcc3(cpu_fsr, tcg_env);
1396         break;
1397     }
1398 }
1399 
1400 #else
1401 
/* Pre-v9 float32 compare: only one fcc field exists, FCCNO is unused.  */
static void gen_op_fcmps(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmps(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1406 
/* Pre-v9 float64 compare: only one fcc field exists, FCCNO is unused.  */
static void gen_op_fcmpd(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmpd(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1411 
/* Pre-v9 float128 compare (operands in QT0/QT1); FCCNO is unused.  */
static void gen_op_fcmpq(int fccno)
{
    gen_helper_fcmpq(cpu_fsr, tcg_env);
}
1416 
/* Pre-v9 signaling float32 compare; FCCNO is unused.  */
static void gen_op_fcmpes(int fccno, TCGv r_rs1, TCGv r_rs2)
{
    gen_helper_fcmpes(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1421 
/* Pre-v9 signaling float64 compare; FCCNO is unused.  */
static void gen_op_fcmped(int fccno, TCGv_i64 r_rs1, TCGv_i64 r_rs2)
{
    gen_helper_fcmped(cpu_fsr, tcg_env, r_rs1, r_rs2);
}
1426 
/* Pre-v9 signaling float128 compare (operands in QT0/QT1); FCCNO unused.  */
static void gen_op_fcmpeq(int fccno)
{
    gen_helper_fcmpeq(cpu_fsr, tcg_env);
}
1431 #endif
1432 
/*
 * Raise an FP exception with the given FTT bits.  The FSR update must
 * be emitted before the exception, since gen_exception ends the TB.
 */
static void gen_op_fpexception_im(DisasContext *dc, int fsr_flags)
{
    /* Clear the old FTT field, then set the requested flags.  */
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_NMASK);
    tcg_gen_ori_tl(cpu_fsr, cpu_fsr, fsr_flags);
    gen_exception(dc, TT_FP_EXCP);
}
1439 
1440 static int gen_trap_ifnofpu(DisasContext *dc)
1441 {
1442 #if !defined(CONFIG_USER_ONLY)
1443     if (!dc->fpu_enabled) {
1444         gen_exception(dc, TT_NFPU_INSN);
1445         return 1;
1446     }
1447 #endif
1448     return 0;
1449 }
1450 
/* Clear the current IEEE exception (CEXC) and trap type (FTT) fields
   of FSR, as required before launching a new FP operation.  */
static void gen_op_clear_ieee_excp_and_FTT(void)
{
    tcg_gen_andi_tl(cpu_fsr, cpu_fsr, FSR_FTT_CEXC_NMASK);
}
1455 
1456 static void gen_fop_FF(DisasContext *dc, int rd, int rs,
1457                               void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32))
1458 {
1459     TCGv_i32 dst, src;
1460 
1461     src = gen_load_fpr_F(dc, rs);
1462     dst = gen_dest_fpr_F(dc);
1463 
1464     gen(dst, tcg_env, src);
1465     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1466 
1467     gen_store_fpr_F(dc, rd, dst);
1468 }
1469 
1470 static void gen_ne_fop_FF(DisasContext *dc, int rd, int rs,
1471                           void (*gen)(TCGv_i32, TCGv_i32))
1472 {
1473     TCGv_i32 dst, src;
1474 
1475     src = gen_load_fpr_F(dc, rs);
1476     dst = gen_dest_fpr_F(dc);
1477 
1478     gen(dst, src);
1479 
1480     gen_store_fpr_F(dc, rd, dst);
1481 }
1482 
1483 static void gen_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
1484                         void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i32, TCGv_i32))
1485 {
1486     TCGv_i32 dst, src1, src2;
1487 
1488     src1 = gen_load_fpr_F(dc, rs1);
1489     src2 = gen_load_fpr_F(dc, rs2);
1490     dst = gen_dest_fpr_F(dc);
1491 
1492     gen(dst, tcg_env, src1, src2);
1493     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1494 
1495     gen_store_fpr_F(dc, rd, dst);
1496 }
1497 
1498 #ifdef TARGET_SPARC64
/* Binary float32 op with no IEEE exception check:
   f[rd] = gen(f[rs1], f[rs2]).  */
static void gen_ne_fop_FFF(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i32, TCGv_i32, TCGv_i32))
{
    TCGv_i32 dst, src1, src2;

    src1 = gen_load_fpr_F(dc, rs1);
    src2 = gen_load_fpr_F(dc, rs2);
    dst = gen_dest_fpr_F(dc);

    gen(dst, src1, src2);

    gen_store_fpr_F(dc, rd, dst);
}
1512 #endif
1513 
1514 static void gen_fop_DD(DisasContext *dc, int rd, int rs,
1515                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64))
1516 {
1517     TCGv_i64 dst, src;
1518 
1519     src = gen_load_fpr_D(dc, rs);
1520     dst = gen_dest_fpr_D(dc, rd);
1521 
1522     gen(dst, tcg_env, src);
1523     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1524 
1525     gen_store_fpr_D(dc, rd, dst);
1526 }
1527 
1528 #ifdef TARGET_SPARC64
/* Unary float64 op with no IEEE exception check: d[rd] = gen(d[rs]).  */
static void gen_ne_fop_DD(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src;

    src = gen_load_fpr_D(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src);

    gen_store_fpr_D(dc, rd, dst);
}
1541 #endif
1542 
1543 static void gen_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
1544                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i64, TCGv_i64))
1545 {
1546     TCGv_i64 dst, src1, src2;
1547 
1548     src1 = gen_load_fpr_D(dc, rs1);
1549     src2 = gen_load_fpr_D(dc, rs2);
1550     dst = gen_dest_fpr_D(dc, rd);
1551 
1552     gen(dst, tcg_env, src1, src2);
1553     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1554 
1555     gen_store_fpr_D(dc, rd, dst);
1556 }
1557 
1558 #ifdef TARGET_SPARC64
/* Binary float64 op with no IEEE exception check:
   d[rd] = gen(d[rs1], d[rs2]).  */
static void gen_ne_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                           void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1572 
/* Binary float64 op that also consumes %gsr (VIS ops such as faligndata):
   d[rd] = gen(gsr, d[rs1], d[rs2]); no IEEE exception check.  */
static void gen_gsr_fop_DDD(DisasContext *dc, int rd, int rs1, int rs2,
                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, cpu_gsr, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1586 
/* Ternary float64 op reading the old destination value:
   d[rd] = gen(d[rd], d[rs1], d[rs2]); no IEEE exception check.  */
static void gen_ne_fop_DDDD(DisasContext *dc, int rd, int rs1, int rs2,
                            void (*gen)(TCGv_i64, TCGv_i64, TCGv_i64, TCGv_i64))
{
    TCGv_i64 dst, src0, src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);
    /* src0 is the previous contents of the destination register.  */
    src0 = gen_load_fpr_D(dc, rd);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, src0, src1, src2);

    gen_store_fpr_D(dc, rd, dst);
}
1601 #endif
1602 
/*
 * Unary float128 op.  Quad values are staged through the global QT0/QT1
 * temporaries: the operand is loaded into QT1, the helper produces its
 * result in QT0, which is then stored back to f[rd].  Followed by the
 * IEEE exception check.
 */
static void gen_fop_QQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1614 
1615 #ifdef TARGET_SPARC64
/* Unary float128 op (QT1 in, QT0 out) with no IEEE exception check.  */
static void gen_ne_fop_QQ(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT1(QFPREG(rs));

    gen(tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1626 #endif
1627 
/* Binary float128 op: operands staged in QT0 (rs1) and QT1 (rs2),
   result taken from QT0; followed by the IEEE exception check.  */
static void gen_fop_QQQ(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr))
{
    gen_op_load_fpr_QT0(QFPREG(rs1));
    gen_op_load_fpr_QT1(QFPREG(rs2));

    gen(tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1640 
1641 static void gen_fop_DFF(DisasContext *dc, int rd, int rs1, int rs2,
1642                         void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32, TCGv_i32))
1643 {
1644     TCGv_i64 dst;
1645     TCGv_i32 src1, src2;
1646 
1647     src1 = gen_load_fpr_F(dc, rs1);
1648     src2 = gen_load_fpr_F(dc, rs2);
1649     dst = gen_dest_fpr_D(dc, rd);
1650 
1651     gen(dst, tcg_env, src1, src2);
1652     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1653 
1654     gen_store_fpr_D(dc, rd, dst);
1655 }
1656 
/* Widening binary op, float64 x float64 -> float128: the helper leaves
   its result in QT0, which is stored to q[rd]; followed by the IEEE
   exception check.  */
static void gen_fop_QDD(DisasContext *dc, int rd, int rs1, int rs2,
                        void (*gen)(TCGv_ptr, TCGv_i64, TCGv_i64))
{
    TCGv_i64 src1, src2;

    src1 = gen_load_fpr_D(dc, rs1);
    src2 = gen_load_fpr_D(dc, rs2);

    gen(tcg_env, src1, src2);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1671 
1672 #ifdef TARGET_SPARC64
1673 static void gen_fop_DF(DisasContext *dc, int rd, int rs,
1674                        void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
1675 {
1676     TCGv_i64 dst;
1677     TCGv_i32 src;
1678 
1679     src = gen_load_fpr_F(dc, rs);
1680     dst = gen_dest_fpr_D(dc, rd);
1681 
1682     gen(dst, tcg_env, src);
1683     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1684 
1685     gen_store_fpr_D(dc, rd, dst);
1686 }
1687 #endif
1688 
/* Widening unary op, float32 -> float64, like gen_fop_DF but without
   the IEEE exception check (the helper still takes env).  */
static void gen_ne_fop_DF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_i64, TCGv_ptr, TCGv_i32))
{
    TCGv_i64 dst;
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env, src);

    gen_store_fpr_D(dc, rd, dst);
}
1702 
1703 static void gen_fop_FD(DisasContext *dc, int rd, int rs,
1704                        void (*gen)(TCGv_i32, TCGv_ptr, TCGv_i64))
1705 {
1706     TCGv_i32 dst;
1707     TCGv_i64 src;
1708 
1709     src = gen_load_fpr_D(dc, rs);
1710     dst = gen_dest_fpr_F(dc);
1711 
1712     gen(dst, tcg_env, src);
1713     gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);
1714 
1715     gen_store_fpr_F(dc, rd, dst);
1716 }
1717 
/* Narrowing unary op, float128 -> float32: operand staged in QT1,
   f[rd] = gen(env); followed by the IEEE exception check.  */
static void gen_fop_FQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i32, TCGv_ptr))
{
    TCGv_i32 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_F(dc);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_F(dc, rd, dst);
}
1731 
/* Narrowing unary op, float128 -> float64: operand staged in QT1,
   d[rd] = gen(env); followed by the IEEE exception check.  */
static void gen_fop_DQ(DisasContext *dc, int rd, int rs,
                       void (*gen)(TCGv_i64, TCGv_ptr))
{
    TCGv_i64 dst;

    gen_op_load_fpr_QT1(QFPREG(rs));
    dst = gen_dest_fpr_D(dc, rd);

    gen(dst, tcg_env);
    gen_helper_check_ieee_exceptions(cpu_fsr, tcg_env);

    gen_store_fpr_D(dc, rd, dst);
}
1745 
/* Widening unary op, float32 -> float128: the helper leaves its result
   in QT0, which is stored to q[rd]; no IEEE exception check.  */
static void gen_ne_fop_QF(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i32))
{
    TCGv_i32 src;

    src = gen_load_fpr_F(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1758 
/* Widening unary op, float64 -> float128: the helper leaves its result
   in QT0, which is stored to q[rd]; no IEEE exception check.  */
static void gen_ne_fop_QD(DisasContext *dc, int rd, int rs,
                          void (*gen)(TCGv_ptr, TCGv_i64))
{
    TCGv_i64 src;

    src = gen_load_fpr_D(dc, rs);

    gen(tcg_env, src);

    gen_op_store_QT0_fpr(QFPREG(rd));
    gen_update_fprs_dirty(dc, QFPREG(rd));
}
1771 
/* SWAP: atomically exchange SRC with the word at ADDR, old value
   into DST.  The address is masked to 32 bits when required.  */
static void gen_swap(DisasContext *dc, TCGv dst, TCGv src,
                     TCGv addr, int mmu_idx, MemOp memop)
{
    gen_address_mask(dc, addr);
    tcg_gen_atomic_xchg_tl(dst, addr, src, mmu_idx, memop | MO_ALIGN);
}
1778 
1779 static void gen_ldstub(DisasContext *dc, TCGv dst, TCGv addr, int mmu_idx)
1780 {
1781     TCGv m1 = tcg_constant_tl(0xff);
1782     gen_address_mask(dc, addr);
1783     tcg_gen_atomic_xchg_tl(dst, addr, m1, mmu_idx, MO_UB);
1784 }
1785 
1786 /* asi moves */
1787 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
/* Classification of an ASI access, deciding which code-gen path
   gen_{ld,st,swap,cas,ldstub}_asi take.  */
typedef enum {
    GET_ASI_HELPER,          /* generic: go through the ld/st_asi helpers */
    GET_ASI_EXCP,            /* an exception was raised; emit nothing */
    GET_ASI_DIRECT,          /* plain qemu_ld/st with a chosen mmu_idx */
    GET_ASI_DTWINX,          /* 128-bit twin load/store */
    GET_ASI_BLOCK,           /* 64-byte block transfer */
    GET_ASI_SHORT,           /* 8/16-bit FP load/store short */
    GET_ASI_BCOPY,           /* sparc32 block copy */
    GET_ASI_BFILL,           /* sparc32 block fill */
} ASIType;
1798 
/* Decoded ASI access: classification plus the raw ASI number, the
   MMU index to use, and the (possibly byte-swapped) memory op.  */
typedef struct {
    ASIType type;
    int asi;
    int mem_idx;
    MemOp memop;
} DisasASI;
1805 
/*
 * Decode the ASI of a load/store-alternate instruction into a DisasASI:
 * classify the access, select the MMU index, and adjust the MemOp
 * (e.g. byte-swap for little-endian ASIs, width for FL8/FL16).
 * On privilege/illegal-insn violations, an exception is emitted and
 * the type is GET_ASI_EXCP.
 */
static DisasASI get_asi(DisasContext *dc, int insn, MemOp memop)
{
    int asi = GET_FIELD(insn, 19, 26);
    ASIType type = GET_ASI_HELPER;
    int mem_idx = dc->mem_idx;

#ifndef TARGET_SPARC64
    /* Before v9, all asis are immediate and privileged.  */
    if (IS_IMM) {
        gen_exception(dc, TT_ILL_INSN);
        type = GET_ASI_EXCP;
    } else if (supervisor(dc)
               /* Note that LEON accepts ASI_USERDATA in user mode, for
                  use with CASA.  Also note that previous versions of
                  QEMU allowed (and old versions of gcc emitted) ASI_P
                  for LEON, which is incorrect.  */
               || (asi == ASI_USERDATA
                   && (dc->def->features & CPU_FEATURE_CASA))) {
        switch (asi) {
        case ASI_USERDATA:   /* User data access */
            mem_idx = MMU_USER_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_KERNELDATA: /* Supervisor data access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BYPASS:    /* MMU passthrough */
        case ASI_LEON_BYPASS: /* LEON MMU passthrough */
            mem_idx = MMU_PHYS_IDX;
            type = GET_ASI_DIRECT;
            break;
        case ASI_M_BCOPY: /* Block copy, sta access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BCOPY;
            break;
        case ASI_M_BFILL: /* Block fill, stda access */
            mem_idx = MMU_KERNEL_IDX;
            type = GET_ASI_BFILL;
            break;
        }

        /* MMU_PHYS_IDX is used when the MMU is disabled to passthrough the
         * permissions check in get_physical_address(..).
         */
        mem_idx = (dc->mem_idx == MMU_PHYS_IDX) ? MMU_PHYS_IDX : mem_idx;
    } else {
        gen_exception(dc, TT_PRIV_INSN);
        type = GET_ASI_EXCP;
    }
#else
    if (IS_IMM) {
        /* Register-form ASI: taken from the %asi register at decode.  */
        asi = dc->asi;
    }
    /* With v9, all asis below 0x80 are privileged.  */
    /* ??? We ought to check cpu_has_hypervisor, but we didn't copy
       down that bit into DisasContext.  For the moment that's ok,
       since the direct implementations below doesn't have any ASIs
       in the restricted [0x30, 0x7f] range, and the check will be
       done properly in the helper.  */
    if (!supervisor(dc) && asi < 0x80) {
        gen_exception(dc, TT_PRIV_ACT);
        type = GET_ASI_EXCP;
    } else {
        /* First switch: choose the MMU index from the address space.  */
        switch (asi) {
        case ASI_REAL:      /* Bypass */
        case ASI_REAL_IO:   /* Bypass, non-cacheable */
        case ASI_REAL_L:    /* Bypass LE */
        case ASI_REAL_IO_L: /* Bypass, non-cacheable LE */
        case ASI_TWINX_REAL:   /* Real address, twinx */
        case ASI_TWINX_REAL_L: /* Real address, twinx, LE */
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
            mem_idx = MMU_PHYS_IDX;
            break;
        case ASI_N:  /* Nucleus */
        case ASI_NL: /* Nucleus LE */
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            if (hypervisor(dc)) {
                mem_idx = MMU_PHYS_IDX;
            } else {
                mem_idx = MMU_NUCLEUS_IDX;
            }
            break;
        case ASI_AIUP:  /* As if user primary */
        case ASI_AIUPL: /* As if user primary LE */
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
            mem_idx = MMU_USER_IDX;
            break;
        case ASI_AIUS:  /* As if user secondary */
        case ASI_AIUSL: /* As if user secondary LE */
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
            mem_idx = MMU_USER_SECONDARY_IDX;
            break;
        case ASI_S:  /* Secondary */
        case ASI_SL: /* Secondary LE */
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL16_S:
        case ASI_FL16_SL:
            if (mem_idx == MMU_USER_IDX) {
                mem_idx = MMU_USER_SECONDARY_IDX;
            } else if (mem_idx == MMU_KERNEL_IDX) {
                mem_idx = MMU_KERNEL_SECONDARY_IDX;
            }
            break;
        case ASI_P:  /* Primary */
        case ASI_PL: /* Primary LE */
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_P:
        case ASI_BLK_PL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            break;
        }
        /* Second switch: classify the access type.  */
        switch (asi) {
        case ASI_REAL:
        case ASI_REAL_IO:
        case ASI_REAL_L:
        case ASI_REAL_IO_L:
        case ASI_N:
        case ASI_NL:
        case ASI_AIUP:
        case ASI_AIUPL:
        case ASI_AIUS:
        case ASI_AIUSL:
        case ASI_S:
        case ASI_SL:
        case ASI_P:
        case ASI_PL:
            type = GET_ASI_DIRECT;
            break;
        case ASI_TWINX_REAL:
        case ASI_TWINX_REAL_L:
        case ASI_TWINX_N:
        case ASI_TWINX_NL:
        case ASI_TWINX_AIUP:
        case ASI_TWINX_AIUP_L:
        case ASI_TWINX_AIUS:
        case ASI_TWINX_AIUS_L:
        case ASI_TWINX_P:
        case ASI_TWINX_PL:
        case ASI_TWINX_S:
        case ASI_TWINX_SL:
        case ASI_QUAD_LDD_PHYS:
        case ASI_QUAD_LDD_PHYS_L:
        case ASI_NUCLEUS_QUAD_LDD:
        case ASI_NUCLEUS_QUAD_LDD_L:
            type = GET_ASI_DTWINX;
            break;
        case ASI_BLK_COMMIT_P:
        case ASI_BLK_COMMIT_S:
        case ASI_BLK_AIUP_4V:
        case ASI_BLK_AIUP_L_4V:
        case ASI_BLK_AIUP:
        case ASI_BLK_AIUPL:
        case ASI_BLK_AIUS_4V:
        case ASI_BLK_AIUS_L_4V:
        case ASI_BLK_AIUS:
        case ASI_BLK_AIUSL:
        case ASI_BLK_S:
        case ASI_BLK_SL:
        case ASI_BLK_P:
        case ASI_BLK_PL:
            type = GET_ASI_BLOCK;
            break;
        case ASI_FL8_S:
        case ASI_FL8_SL:
        case ASI_FL8_P:
        case ASI_FL8_PL:
            memop = MO_UB;
            type = GET_ASI_SHORT;
            break;
        case ASI_FL16_S:
        case ASI_FL16_SL:
        case ASI_FL16_P:
        case ASI_FL16_PL:
            memop = MO_TEUW;
            type = GET_ASI_SHORT;
            break;
        }
        /* The little-endian asis all have bit 3 set.  */
        if (asi & 8) {
            memop ^= MO_BSWAP;
        }
    }
#endif

    return (DisasASI){ type, asi, mem_idx, memop };
}
2018 
/*
 * Generate a load-alternate: DST = mem[ADDR] in the address space
 * selected by the ASI of INSN.  Direct accesses inline a qemu_ld;
 * everything else goes through the ld_asi helper.
 */
static void gen_ld_asi(DisasContext *dc, TCGv dst, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for ldda.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_tl(dst, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may fault, so the PC/NPC must be up to date.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_ld_asi(dst, tcg_env, addr, r_asi, r_mop);
#else
            {
                /* The helper returns 64 bits; narrow to the target reg.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_trunc_i64_tl(dst, t64);
            }
#endif
        }
        break;
    }
}
2053 
/*
 * Generate a store-alternate: mem[ADDR] = SRC in the address space
 * selected by the ASI of INSN.  Direct accesses inline a qemu_st;
 * sparc32 block-copy is open-coded; everything else goes through the
 * st_asi helper (which ends the TB, since it may alter page maps).
 */
static void gen_st_asi(DisasContext *dc, TCGv src, TCGv addr,
                       int insn, MemOp memop)
{
    DisasASI da = get_asi(dc, insn, memop);

    switch (da.type) {
    case GET_ASI_EXCP:
        break;
    case GET_ASI_DTWINX: /* Reserved for stda.  */
#ifndef TARGET_SPARC64
        gen_exception(dc, TT_ILL_INSN);
        break;
#else
        if (!(dc->def->features & CPU_FEATURE_HYPV)) {
            /* Pre OpenSPARC CPUs don't have these */
            gen_exception(dc, TT_ILL_INSN);
            return;
        }
        /* in OpenSPARC T1+ CPUs TWINX ASIs in store instructions
         * are ST_BLKINIT_ ASIs */
#endif
        /* fall through */
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_tl(src, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
#if !defined(TARGET_SPARC64) && !defined(CONFIG_USER_ONLY)
    case GET_ASI_BCOPY:
        /* Copy 32 bytes from the address in SRC to ADDR.  */
        /* ??? The original qemu code suggests 4-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv saddr = tcg_temp_new();
            TCGv daddr = tcg_temp_new();
            TCGv four = tcg_constant_tl(4);
            TCGv_i32 tmp = tcg_temp_new_i32();
            int i;

            tcg_gen_andi_tl(saddr, src, -4);
            tcg_gen_andi_tl(daddr, addr, -4);
            for (i = 0; i < 32; i += 4) {
                /* Since the loads and stores are paired, allow the
                   copy to happen in the host endianness.  */
                tcg_gen_qemu_ld_i32(tmp, saddr, da.mem_idx, MO_UL);
                tcg_gen_qemu_st_i32(tmp, daddr, da.mem_idx, MO_UL);
                tcg_gen_add_tl(saddr, saddr, four);
                tcg_gen_add_tl(daddr, daddr, four);
            }
        }
        break;
#endif
    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(memop | MO_ALIGN);

            /* The helper may fault, so the PC/NPC must be up to date.  */
            save_state(dc);
#ifdef TARGET_SPARC64
            gen_helper_st_asi(tcg_env, addr, src, r_asi, r_mop);
#else
            {
                /* The helper takes a 64-bit value; widen the source.  */
                TCGv_i64 t64 = tcg_temp_new_i64();
                tcg_gen_extu_tl_i64(t64, src);
                gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
            }
#endif

            /* A write to a TLB register may alter page maps.  End the TB. */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2129 
2130 static void gen_swap_asi(DisasContext *dc, TCGv dst, TCGv src,
2131                          TCGv addr, int insn)
2132 {
2133     DisasASI da = get_asi(dc, insn, MO_TEUL);
2134 
2135     switch (da.type) {
2136     case GET_ASI_EXCP:
2137         break;
2138     case GET_ASI_DIRECT:
2139         gen_swap(dc, dst, src, addr, da.mem_idx, da.memop);
2140         break;
2141     default:
2142         /* ??? Should be DAE_invalid_asi.  */
2143         gen_exception(dc, TT_DATA_ACCESS);
2144         break;
2145     }
2146 }
2147 
2148 static void gen_cas_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2149                         int insn, int rd)
2150 {
2151     DisasASI da = get_asi(dc, insn, MO_TEUL);
2152     TCGv oldv;
2153 
2154     switch (da.type) {
2155     case GET_ASI_EXCP:
2156         return;
2157     case GET_ASI_DIRECT:
2158         oldv = tcg_temp_new();
2159         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2160                                   da.mem_idx, da.memop | MO_ALIGN);
2161         gen_store_gpr(dc, rd, oldv);
2162         break;
2163     default:
2164         /* ??? Should be DAE_invalid_asi.  */
2165         gen_exception(dc, TT_DATA_ACCESS);
2166         break;
2167     }
2168 }
2169 
/* Generate code for LDSTUBA: atomically load the byte at ADDR into DST
   and store 0xff back, for the ASI encoded in INSN.  */
static void gen_ldstub_asi(DisasContext *dc, TCGv dst, TCGv addr, int insn)
{
    DisasASI da = get_asi(dc, insn, MO_UB);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_ldstub(dc, dst, addr, da.mem_idx);
        break;
    default:
        /* ??? In theory, this should be raise DAE_invalid_asi.
           But the SS-20 roms do ldstuba [%l0] #ASI_M_CTL, %o1.  */
        if (tb_cflags(dc->base.tb) & CF_PARALLEL) {
            /* The load+store sequence below is not atomic; fall back
               to serialized execution of this TB.  */
            gen_helper_exit_atomic(tcg_env);
        } else {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UB);
            TCGv_i64 s64, t64;

            save_state(dc);
            t64 = tcg_temp_new_i64();
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);

            /* The value stored by ldstub is always 0xff.  */
            s64 = tcg_constant_i64(0xff);
            gen_helper_st_asi(tcg_env, addr, s64, r_asi, r_mop);

            tcg_gen_trunc_i64_tl(dst, t64);

            /* End the TB.  */
            dc->npc = DYNAMIC_PC;
        }
        break;
    }
}
2205 #endif
2206 
2207 #ifdef TARGET_SPARC64
/* Generate code for an FP load alternate (ldfa/lddfa/ldqfa): load SIZE
   bytes (4, 8 or 16) from ADDR into FP register RD, using the ASI
   encoded in INSN.  */
static void gen_ldf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;
    TCGv_i64 d64;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_dest_fpr_F(dc);
            tcg_gen_qemu_ld_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            gen_store_fpr_F(dc, rd, d32);
            break;
        case 8:
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Stage the first half in a temp, presumably so that a fault
               on the second access leaves cpu_fpr[rd/2] untouched
               -- TODO confirm against fault semantics.  */
            d64 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(d64, addr, da.mem_idx, da.memop | MO_ALIGN_4);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd/2+1], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for lddfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Eight consecutive 8-byte loads into fpr[rd/2 .. rd/2+7].  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for lddfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop | MO_ALIGN);

            save_state(dc);
            /* According to the table in the UA2011 manual, the only
               other asis that are valid for ldfa/lddfa/ldqfa are
               the NO_FAULT asis.  We still need a helper for these,
               but we can just use the integer asi helper for them.  */
            switch (size) {
            case 4:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                d32 = gen_dest_fpr_F(dc);
                tcg_gen_extrl_i64_i32(d32, d64);
                gen_store_fpr_F(dc, rd, d32);
                break;
            case 8:
                gen_helper_ld_asi(cpu_fpr[rd / 2], tcg_env, addr, r_asi, r_mop);
                break;
            case 16:
                d64 = tcg_temp_new_i64();
                gen_helper_ld_asi(d64, tcg_env, addr, r_asi, r_mop);
                tcg_gen_addi_tl(addr, addr, 8);
                gen_helper_ld_asi(cpu_fpr[rd/2+1], tcg_env, addr, r_asi, r_mop);
                tcg_gen_mov_i64(cpu_fpr[rd / 2], d64);
                break;
            default:
                g_assert_not_reached();
            }
        }
        break;
    }
}
2316 
/* Generate code for an FP store alternate (stfa/stdfa/stqfa): store SIZE
   bytes (4, 8 or 16) from FP register RD to ADDR, using the ASI encoded
   in INSN.  */
static void gen_stf_asi(DisasContext *dc, TCGv addr,
                        int insn, int size, int rd)
{
    DisasASI da = get_asi(dc, insn, (size == 4 ? MO_TEUL : MO_TEUQ));
    TCGv_i32 d32;

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        switch (size) {
        case 4:
            d32 = gen_load_fpr_F(dc, rd);
            tcg_gen_qemu_st_i32(d32, addr, da.mem_idx, da.memop | MO_ALIGN);
            break;
        case 8:
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_4);
            break;
        case 16:
            /* Only 4-byte alignment required.  However, it is legal for the
               cpu to signal the alignment fault, and the OS trap handler is
               required to fix it up.  Requiring 16-byte alignment here avoids
               having to probe the second page before performing the first
               write.  */
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN_16);
            tcg_gen_addi_tl(addr, addr, 8);
            tcg_gen_qemu_st_i64(cpu_fpr[rd/2+1], addr, da.mem_idx, da.memop);
            break;
        default:
            g_assert_not_reached();
        }
        break;

    case GET_ASI_BLOCK:
        /* Valid for stdfa on aligned registers only.  */
        if (size == 8 && (rd & 7) == 0) {
            MemOp memop;
            TCGv eight;
            int i;

            gen_address_mask(dc, addr);

            /* The first operation checks required alignment.  */
            memop = da.memop | MO_ALIGN_64;
            eight = tcg_constant_tl(8);
            /* Eight consecutive 8-byte stores from fpr[rd/2 .. rd/2+7].  */
            for (i = 0; ; ++i) {
                tcg_gen_qemu_st_i64(cpu_fpr[rd / 2 + i], addr,
                                    da.mem_idx, memop);
                if (i == 7) {
                    break;
                }
                tcg_gen_add_tl(addr, addr, eight);
                memop = da.memop;
            }
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    case GET_ASI_SHORT:
        /* Valid for stdfa only.  */
        if (size == 8) {
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(cpu_fpr[rd / 2], addr, da.mem_idx,
                                da.memop | MO_ALIGN);
        } else {
            gen_exception(dc, TT_ILL_INSN);
        }
        break;

    default:
        /* According to the table in the UA2011 manual, the only
           other asis that are valid for stfa/stdfa/stqfa are
           the PST* asis, which aren't currently handled.  */
        gen_exception(dc, TT_ILL_INSN);
        break;
    }
}
2399 
/* Generate code for LDDA (sparc64): load a 64-bit doubleword (or a
   128-bit twin for the TWINX ASIs) from ADDR into the register pair
   r[rd] (high) / r[rd+1] (low).  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv_i64 hi = gen_dest_gpr(dc, rd);
    TCGv_i64 lo = gen_dest_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        return;

    case GET_ASI_DTWINX:
        /* Two full 64-bit loads; the first checks 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_ld_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 tmp = tcg_temp_new_i64();

            gen_address_mask(dc, addr);
            tcg_gen_qemu_ld_i64(tmp, addr, da.mem_idx, da.memop | MO_ALIGN);

            /* Note that LE ldda acts as if each 32-bit register
               result is byte swapped.  Having just performed one
               64-bit bswap, we need now to swap the writebacks.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for ldda, and this should raise DAE_invalid_asi.  However,
           real hardware allows others.  This can be seen with e.g.
           FreeBSD 10.3 wrt ASI_IC_TAG.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 tmp = tcg_temp_new_i64();

            save_state(dc);
            gen_helper_ld_asi(tmp, tcg_env, addr, r_asi, r_mop);

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_extr32_i64(lo, hi, tmp);
            } else {
                tcg_gen_extr32_i64(hi, lo, tmp);
            }
        }
        break;
    }

    gen_store_gpr(dc, rd, hi);
    gen_store_gpr(dc, rd + 1, lo);
}
2461 
/* Generate code for STDA (sparc64): store the register pair
   HI (r[rd]) / r[rd+1] as one 64-bit doubleword (or a 128-bit twin
   for the TWINX ASIs) to ADDR.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;

    case GET_ASI_DTWINX:
        /* Two full 64-bit stores; the first checks 16-byte alignment.  */
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(hi, addr, da.mem_idx, da.memop | MO_ALIGN_16);
        tcg_gen_addi_tl(addr, addr, 8);
        tcg_gen_qemu_st_i64(lo, addr, da.mem_idx, da.memop);
        break;

    case GET_ASI_DIRECT:
        {
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* Note that LE stda acts as if each 32-bit register result is
               byte swapped.  We will perform one 64-bit LE store, so now
               we must swap the order of the construction.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }
            gen_address_mask(dc, addr);
            tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        }
        break;

    default:
        /* ??? In theory we've handled all of the ASIs that are valid
           for stda, and this should raise DAE_invalid_asi.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(da.memop);
            TCGv_i64 t64 = tcg_temp_new_i64();

            /* See above.  */
            if ((da.memop & MO_BSWAP) == MO_TE) {
                tcg_gen_concat32_i64(t64, lo, hi);
            } else {
                tcg_gen_concat32_i64(t64, hi, lo);
            }

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2517 
2518 static void gen_casx_asi(DisasContext *dc, TCGv addr, TCGv cmpv,
2519                          int insn, int rd)
2520 {
2521     DisasASI da = get_asi(dc, insn, MO_TEUQ);
2522     TCGv oldv;
2523 
2524     switch (da.type) {
2525     case GET_ASI_EXCP:
2526         return;
2527     case GET_ASI_DIRECT:
2528         oldv = tcg_temp_new();
2529         tcg_gen_atomic_cmpxchg_tl(oldv, addr, cmpv, gen_load_gpr(dc, rd),
2530                                   da.mem_idx, da.memop | MO_ALIGN);
2531         gen_store_gpr(dc, rd, oldv);
2532         break;
2533     default:
2534         /* ??? Should be DAE_invalid_asi.  */
2535         gen_exception(dc, TT_DATA_ACCESS);
2536         break;
2537     }
2538 }
2539 
2540 #elif !defined(CONFIG_USER_ONLY)
/* Generate code for LDDA (sparc32): load one 64-bit doubleword from ADDR
   and split it into the register pair r[rd] (high) / r[rd+1] (low).  */
static void gen_ldda_asi(DisasContext *dc, TCGv addr, int insn, int rd)
{
    /* ??? Work around an apparent bug in Ubuntu gcc 4.8.2-10ubuntu2+12,
       whereby "rd + 1" elicits "error: array subscript is above array".
       Since we have already asserted that rd is even, the semantics
       are unchanged.  */
    TCGv lo = gen_dest_gpr(dc, rd | 1);
    TCGv hi = gen_dest_gpr(dc, rd);
    TCGv_i64 t64 = tcg_temp_new_i64();
    DisasASI da = get_asi(dc, insn, MO_TEUQ);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        return;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_ld_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    default:
        /* Unknown ASI: defer to the runtime helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_ld_asi(t64, tcg_env, addr, r_asi, r_mop);
        }
        break;
    }

    /* High word of the loaded value goes to rd, low word to rd|1.  */
    tcg_gen_extr_i64_i32(lo, hi, t64);
    gen_store_gpr(dc, rd | 1, lo);
    gen_store_gpr(dc, rd, hi);
}
2574 
/* Generate code for STDA (sparc32): store the register pair
   HI (r[rd]) / r[rd+1] as one 64-bit doubleword to ADDR.  */
static void gen_stda_asi(DisasContext *dc, TCGv hi, TCGv addr,
                         int insn, int rd)
{
    DisasASI da = get_asi(dc, insn, MO_TEUQ);
    TCGv lo = gen_load_gpr(dc, rd + 1);
    TCGv_i64 t64 = tcg_temp_new_i64();

    /* Build the 64-bit value to store: hi in the upper half.  */
    tcg_gen_concat_tl_i64(t64, lo, hi);

    switch (da.type) {
    case GET_ASI_EXCP:
        /* get_asi already raised an exception; emit nothing.  */
        break;
    case GET_ASI_DIRECT:
        gen_address_mask(dc, addr);
        tcg_gen_qemu_st_i64(t64, addr, da.mem_idx, da.memop | MO_ALIGN);
        break;
    case GET_ASI_BFILL:
        /* Store 32 bytes of T64 to ADDR.  */
        /* ??? The original qemu code suggests 8-byte alignment, dropping
           the low bits, but the only place I can see this used is in the
           Linux kernel with 32 byte alignment, which would make more sense
           as a cacheline-style operation.  */
        {
            TCGv d_addr = tcg_temp_new();
            TCGv eight = tcg_constant_tl(8);
            int i;

            tcg_gen_andi_tl(d_addr, addr, -8);
            for (i = 0; i < 32; i += 8) {
                tcg_gen_qemu_st_i64(t64, d_addr, da.mem_idx, da.memop);
                tcg_gen_add_tl(d_addr, d_addr, eight);
            }
        }
        break;
    default:
        /* Unknown ASI: defer to the runtime helper.  */
        {
            TCGv_i32 r_asi = tcg_constant_i32(da.asi);
            TCGv_i32 r_mop = tcg_constant_i32(MO_UQ);

            save_state(dc);
            gen_helper_st_asi(tcg_env, addr, t64, r_asi, r_mop);
        }
        break;
    }
}
2620 #endif
2621 
2622 static TCGv get_src1(DisasContext *dc, unsigned int insn)
2623 {
2624     unsigned int rs1 = GET_FIELD(insn, 13, 17);
2625     return gen_load_gpr(dc, rs1);
2626 }
2627 
2628 static TCGv get_src2(DisasContext *dc, unsigned int insn)
2629 {
2630     if (IS_IMM) { /* immediate */
2631         target_long simm = GET_FIELDs(insn, 19, 31);
2632         TCGv t = tcg_temp_new();
2633         tcg_gen_movi_tl(t, simm);
2634         return t;
2635     } else {      /* register */
2636         unsigned int rs2 = GET_FIELD(insn, 27, 31);
2637         return gen_load_gpr(dc, rs2);
2638     }
2639 }
2640 
2641 #ifdef TARGET_SPARC64
/* FMOVS<cond>: conditionally move single-precision FP register RS to RD
   according to the comparison CMP.  */
static void gen_fmovs(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    TCGv_i32 c32, zero, dst, s1, s2;

    /* We have two choices here: extend the 32 bit data and use movcond_i64,
       or fold the comparison down to 32 bits and use movcond_i32.  Choose
       the later.  */
    c32 = tcg_temp_new_i32();
    if (cmp->is_bool) {
        /* cmp->c1 already holds the 0/1 comparison result.  */
        tcg_gen_extrl_i64_i32(c32, cmp->c1);
    } else {
        /* Evaluate the comparison to a boolean, then narrow it.  */
        TCGv_i64 c64 = tcg_temp_new_i64();
        tcg_gen_setcond_i64(cmp->cond, c64, cmp->c1, cmp->c2);
        tcg_gen_extrl_i64_i32(c32, c64);
    }

    s1 = gen_load_fpr_F(dc, rs);
    s2 = gen_load_fpr_F(dc, rd);
    dst = gen_dest_fpr_F(dc);
    zero = tcg_constant_i32(0);

    /* dst = (c32 != 0) ? s1 : s2, i.e. keep rd when the cond is false.  */
    tcg_gen_movcond_i32(TCG_COND_NE, dst, c32, zero, s1, s2);

    gen_store_fpr_F(dc, rd, dst);
}
2667 
2668 static void gen_fmovd(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
2669 {
2670     TCGv_i64 dst = gen_dest_fpr_D(dc, rd);
2671     tcg_gen_movcond_i64(cmp->cond, dst, cmp->c1, cmp->c2,
2672                         gen_load_fpr_D(dc, rs),
2673                         gen_load_fpr_D(dc, rd));
2674     gen_store_fpr_D(dc, rd, dst);
2675 }
2676 
/* FMOVQ<cond>: conditionally move quad-precision FP register RS to RD,
   handled as two 64-bit halves with the same condition.  */
static void gen_fmovq(DisasContext *dc, DisasCompare *cmp, int rd, int rs)
{
    int qd = QFPREG(rd);
    int qs = QFPREG(rs);

    /* Each movcond keeps the destination half when the cond is false.  */
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2], cpu_fpr[qd / 2]);
    tcg_gen_movcond_i64(cmp->cond, cpu_fpr[qd / 2 + 1], cmp->c1, cmp->c2,
                        cpu_fpr[qs / 2 + 1], cpu_fpr[qd / 2 + 1]);

    gen_update_fprs_dirty(dc, qd);
}
2689 
2690 #ifndef CONFIG_USER_ONLY
/* Set R_TSPTR to point at the current trap state:
   r_tsptr = &env->ts[env->tl & MAXTL_MASK].  */
static void gen_load_trap_state_at_tl(TCGv_ptr r_tsptr, TCGv_env tcg_env)
{
    TCGv_i32 r_tl = tcg_temp_new_i32();

    /* load env->tl into r_tl */
    tcg_gen_ld_i32(r_tl, tcg_env, offsetof(CPUSPARCState, tl));

    /* tl = [0 ... MAXTL_MASK] where MAXTL_MASK must be power of 2 */
    tcg_gen_andi_i32(r_tl, r_tl, MAXTL_MASK);

    /* calculate offset to current trap state from env->ts, reuse r_tl */
    tcg_gen_muli_i32(r_tl, r_tl, sizeof (trap_state));
    tcg_gen_addi_ptr(r_tsptr, tcg_env, offsetof(CPUSPARCState, ts));

    /* tsptr = env->ts[env->tl & MAXTL_MASK] */
    {
        TCGv_ptr r_tl_tmp = tcg_temp_new_ptr();
        tcg_gen_ext_i32_ptr(r_tl_tmp, r_tl);
        tcg_gen_add_ptr(r_tsptr, r_tsptr, r_tl_tmp);
    }
}
2712 #endif
2713 
/* Generate code for the VIS EDGE instructions (EDGE8/16/32 and their
   L/Nc variants): DST receives the edge mask derived from the low bits
   of S1 and S2, and with CC set the condition codes are set as for
   subcc(s1, s2).  WIDTH is the element size in bits; LEFT selects the
   left-edge table.  */
static void gen_edge(DisasContext *dc, TCGv dst, TCGv s1, TCGv s2,
                     int width, bool cc, bool left)
{
    TCGv lo1, lo2;
    uint64_t amask, tabl, tabr;
    int shift, imask, omask;

    if (cc) {
        /* Set integer condition codes exactly as subcc would.  */
        tcg_gen_mov_tl(cpu_cc_src, s1);
        tcg_gen_mov_tl(cpu_cc_src2, s2);
        tcg_gen_sub_tl(cpu_cc_dst, s1, s2);
        tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
        dc->cc_op = CC_OP_SUB;
    }

    /* Theory of operation: there are two tables, left and right (not to
       be confused with the left and right versions of the opcode).  These
       are indexed by the low 3 bits of the inputs.  To make things "easy",
       these tables are loaded into two constants, TABL and TABR below.
       The operation index = (input & imask) << shift calculates the index
       into the constant, while val = (table >> index) & omask calculates
       the value we're looking for.  */
    switch (width) {
    case 8:
        imask = 0x7;
        shift = 3;
        omask = 0xff;
        if (left) {
            tabl = 0x80c0e0f0f8fcfeffULL;
            tabr = 0xff7f3f1f0f070301ULL;
        } else {
            tabl = 0x0103070f1f3f7fffULL;
            tabr = 0xfffefcf8f0e0c080ULL;
        }
        break;
    case 16:
        imask = 0x6;
        shift = 1;
        omask = 0xf;
        if (left) {
            tabl = 0x8cef;
            tabr = 0xf731;
        } else {
            tabl = 0x137f;
            tabr = 0xfec8;
        }
        break;
    case 32:
        imask = 0x4;
        shift = 0;
        omask = 0x3;
        if (left) {
            tabl = (2 << 2) | 3;
            tabr = (3 << 2) | 1;
        } else {
            tabl = (1 << 2) | 3;
            tabr = (3 << 2) | 2;
        }
        break;
    default:
        abort();
    }

    /* lo1 = (tabl >> ((s1 & imask) << shift)) & omask, likewise lo2.  */
    lo1 = tcg_temp_new();
    lo2 = tcg_temp_new();
    tcg_gen_andi_tl(lo1, s1, imask);
    tcg_gen_andi_tl(lo2, s2, imask);
    tcg_gen_shli_tl(lo1, lo1, shift);
    tcg_gen_shli_tl(lo2, lo2, shift);

    tcg_gen_shr_tl(lo1, tcg_constant_tl(tabl), lo1);
    tcg_gen_shr_tl(lo2, tcg_constant_tl(tabr), lo2);
    tcg_gen_andi_tl(lo1, lo1, omask);
    tcg_gen_andi_tl(lo2, lo2, omask);

    /* Compare the 8-byte-aligned addresses, truncated to 32 bits when
       the address mask is in effect.  NB: this writes back into s1/s2.  */
    amask = -8;
    if (AM_CHECK(dc)) {
        amask &= 0xffffffffULL;
    }
    tcg_gen_andi_tl(s1, s1, amask);
    tcg_gen_andi_tl(s2, s2, amask);

    /* Compute dst = (s1 == s2 ? lo1 : lo1 & lo2). */
    tcg_gen_and_tl(lo2, lo2, lo1);
    tcg_gen_movcond_tl(TCG_COND_EQ, dst, s1, s2, lo1, lo2);
}
2800 
2801 static void gen_alignaddr(TCGv dst, TCGv s1, TCGv s2, bool left)
2802 {
2803     TCGv tmp = tcg_temp_new();
2804 
2805     tcg_gen_add_tl(tmp, s1, s2);
2806     tcg_gen_andi_tl(dst, tmp, -8);
2807     if (left) {
2808         tcg_gen_neg_tl(tmp, tmp);
2809     }
2810     tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, tmp, 0, 3);
2811 }
2812 
/* FALIGNDATA: treat S1:S2 as a 16-byte value and extract the 8 bytes
   starting at the byte offset held in the low 3 bits of GSR.  */
static void gen_faligndata(TCGv dst, TCGv gsr, TCGv s1, TCGv s2)
{
    TCGv t1, t2, shift;

    t1 = tcg_temp_new();
    t2 = tcg_temp_new();
    shift = tcg_temp_new();

    /* shift = (gsr & 7) * 8: the bit offset into s1.  */
    tcg_gen_andi_tl(shift, gsr, 7);
    tcg_gen_shli_tl(shift, shift, 3);
    tcg_gen_shl_tl(t1, s1, shift);

    /* A shift of 64 does not produce 0 in TCG.  Divide this into a
       shift of (up to 63) followed by a constant shift of 1.  */
    tcg_gen_xori_tl(shift, shift, 63);
    tcg_gen_shr_tl(t2, s2, shift);
    tcg_gen_shri_tl(t2, t2, 1);

    tcg_gen_or_tl(dst, t1, t2);
}
2833 #endif
2834 
2835 /* Include the auto-generated decoder.  */
2836 #include "decode-insns.c.inc"
2837 
/* Define trans_<NAME> for the generated decoder: the insn is accepted
   only when avail_<AVAIL>(dc) holds, and is implemented by FUNC.  */
#define TRANS(NAME, AVAIL, FUNC, ...) \
    static bool trans_##NAME(DisasContext *dc, arg_##NAME *a) \
    { return avail_##AVAIL(dc) && FUNC(dc, __VA_ARGS__); }

/* Availability predicates used by TRANS above.  */
#define avail_ALL(C)      true
#ifdef TARGET_SPARC64
# define avail_32(C)      false
# define avail_64(C)      true
#else
# define avail_32(C)      true
# define avail_64(C)      false
#endif
2850 
2851 /* Default case for non jump instructions. */
static bool advance_pc(DisasContext *dc)
{
    /* npc & 3 != 0 means npc holds one of the DYNAMIC_PC/JUMP_PC/
       DYNAMIC_PC_LOOKUP encodings (1..3) rather than a real address.  */
    if (dc->npc & 3) {
        switch (dc->npc) {
        case DYNAMIC_PC:
        case DYNAMIC_PC_LOOKUP:
            /* npc only known at runtime: emit pc/npc update code.  */
            dc->pc = dc->npc;
            gen_op_next_insn();
            break;
        case JUMP_PC:
            /* we can do a static jump */
            gen_branch2(dc, dc->jump_pc[0], dc->jump_pc[1], cpu_cond);
            dc->base.is_jmp = DISAS_NORETURN;
            break;
        default:
            g_assert_not_reached();
        }
    } else {
        /* Both pc and npc are static: advance sequentially.  */
        dc->pc = dc->npc;
        dc->npc = dc->npc + 4;
    }
    return true;
}
2875 
2876 /*
2877  * Major opcodes 00 and 01 -- branches, call, and sethi
2878  */
2879 
2880 static bool advance_jump_uncond_never(DisasContext *dc, bool annul)
2881 {
2882     if (annul) {
2883         dc->pc = dc->npc + 4;
2884         dc->npc = dc->pc + 4;
2885     } else {
2886         dc->pc = dc->npc;
2887         dc->npc = dc->pc + 4;
2888     }
2889     return true;
2890 }
2891 
2892 static bool advance_jump_uncond_always(DisasContext *dc, bool annul,
2893                                        target_ulong dest)
2894 {
2895     if (annul) {
2896         dc->pc = dest;
2897         dc->npc = dest + 4;
2898     } else {
2899         dc->pc = dc->npc;
2900         dc->npc = dest;
2901         tcg_gen_mov_tl(cpu_pc, cpu_npc);
2902     }
2903     return true;
2904 }
2905 
/* Conditional delayed branch to DEST, with comparison CMP.  When ANNUL,
   the delay slot executes only on the taken path, so the branch can be
   resolved immediately with two goto_tb exits.  Otherwise the delay slot
   always executes first and npc becomes one of two values (JUMP_PC).  */
static bool advance_jump_cond(DisasContext *dc, DisasCompare *cmp,
                              bool annul, target_ulong dest)
{
    target_ulong npc = dc->npc;

    if (annul) {
        TCGLabel *l1 = gen_new_label();

        /* Taken: run the delay slot at npc, then DEST.
           Not taken: skip the delay slot entirely.  */
        tcg_gen_brcond_tl(tcg_invert_cond(cmp->cond), cmp->c1, cmp->c2, l1);
        gen_goto_tb(dc, 0, npc, dest);
        gen_set_label(l1);
        gen_goto_tb(dc, 1, npc + 4, npc + 8);

        dc->base.is_jmp = DISAS_NORETURN;
    } else {
        /* npc & 3 != 0 means npc holds a dynamic-PC encoding.  */
        if (npc & 3) {
            switch (npc) {
            case DYNAMIC_PC:
            case DYNAMIC_PC_LOOKUP:
                /* Select the new npc at runtime with movcond.  */
                tcg_gen_mov_tl(cpu_pc, cpu_npc);
                tcg_gen_addi_tl(cpu_npc, cpu_npc, 4);
                tcg_gen_movcond_tl(cmp->cond, cpu_npc,
                                   cmp->c1, cmp->c2,
                                   tcg_constant_tl(dest), cpu_npc);
                dc->pc = npc;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Record both possible successors; cpu_cond selects later.  */
            dc->pc = npc;
            dc->jump_pc[0] = dest;
            dc->jump_pc[1] = npc + 4;
            dc->npc = JUMP_PC;
            if (cmp->is_bool) {
                tcg_gen_mov_tl(cpu_cond, cmp->c1);
            } else {
                tcg_gen_setcond_tl(cmp->cond, cpu_cond, cmp->c1, cmp->c2);
            }
        }
    }
    return true;
}
2949 
2950 static bool do_bpcc(DisasContext *dc, arg_bcc *a)
2951 {
2952     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
2953     DisasCompare cmp;
2954 
2955     switch (a->cond) {
2956     case 0x0:
2957         return advance_jump_uncond_never(dc, a->a);
2958     case 0x8:
2959         return advance_jump_uncond_always(dc, a->a, target);
2960     default:
2961         flush_cond(dc);
2962 
2963         gen_compare(&cmp, a->cc, a->cond, dc);
2964         return advance_jump_cond(dc, &cmp, a->a, target);
2965     }
2966 }
2967 
/* Both integer branch forms share do_bpcc; BPcc is sparc64-only.  */
TRANS(Bicc, ALL, do_bpcc, a)
TRANS(BPcc,  64, do_bpcc, a)
2970 
2971 static bool do_fbpfcc(DisasContext *dc, arg_bcc *a)
2972 {
2973     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
2974     DisasCompare cmp;
2975 
2976     if (gen_trap_ifnofpu(dc)) {
2977         return true;
2978     }
2979     switch (a->cond) {
2980     case 0x0:
2981         return advance_jump_uncond_never(dc, a->a);
2982     case 0x8:
2983         return advance_jump_uncond_always(dc, a->a, target);
2984     default:
2985         flush_cond(dc);
2986 
2987         gen_fcompare(&cmp, a->cc, a->cond);
2988         return advance_jump_cond(dc, &cmp, a->a, target);
2989     }
2990 }
2991 
/* Both FP branch forms share do_fbpfcc; FBPfcc is sparc64-only.  */
TRANS(FBPfcc,  64, do_fbpfcc, a)
TRANS(FBfcc,  ALL, do_fbpfcc, a)
2994 
2995 static bool trans_BPr(DisasContext *dc, arg_BPr *a)
2996 {
2997     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
2998     DisasCompare cmp;
2999 
3000     if (!avail_64(dc)) {
3001         return false;
3002     }
3003     if (gen_tcg_cond_reg[a->cond] == TCG_COND_NEVER) {
3004         return false;
3005     }
3006 
3007     flush_cond(dc);
3008     gen_compare_reg(&cmp, a->cond, gen_load_gpr(dc, a->rs1));
3009     return advance_jump_cond(dc, &cmp, a->a, target);
3010 }
3011 
3012 static bool trans_CALL(DisasContext *dc, arg_CALL *a)
3013 {
3014     target_long target = address_mask_i(dc, dc->pc + a->i * 4);
3015 
3016     gen_store_gpr(dc, 15, tcg_constant_tl(dc->pc));
3017     gen_mov_pc_npc(dc);
3018     dc->npc = target;
3019     return true;
3020 }
3021 
3022 static bool trans_NCP(DisasContext *dc, arg_NCP *a)
3023 {
3024     /*
3025      * For sparc32, always generate the no-coprocessor exception.
3026      * For sparc64, always generate illegal instruction.
3027      */
3028 #ifdef TARGET_SPARC64
3029     return false;
3030 #else
3031     gen_exception(dc, TT_NCP_INSN);
3032     return true;
3033 #endif
3034 }
3035 
3036 static bool trans_SETHI(DisasContext *dc, arg_SETHI *a)
3037 {
3038     /* Special-case %g0 because that's the canonical nop.  */
3039     if (a->rd) {
3040         gen_store_gpr(dc, a->rd, tcg_constant_tl((uint32_t)a->i << 10));
3041     }
3042     return advance_pc(dc);
3043 }
3044 
/*
 * Common translation for the Tcc family (trap on condition codes).
 * The trap number is (rs1 + rs2) or (rs1 + imm), masked to the
 * implementation's trap range and biased by TT_TRAP.
 */
static bool do_tcc(DisasContext *dc, int cond, int cc,
                   int rs1, bool imm, int rs2_or_imm)
{
    /*
     * With hypervisor support and supervisor privilege, the wider
     * UA2005 trap-number mask applies; otherwise the V8 7-bit mask.
     */
    int mask = ((dc->def->features & CPU_FEATURE_HYPV) && supervisor(dc)
                ? UA2005_HTRAP_MASK : V8_TRAP_MASK);
    DisasCompare cmp;
    TCGLabel *lab;
    TCGv_i32 trap;

    /* Trap never.  */
    if (cond == 0) {
        return advance_pc(dc);
    }

    /*
     * Immediate traps are the most common case.  Since this value is
     * live across the branch, it really pays to evaluate the constant.
     */
    if (rs1 == 0 && (imm || rs2_or_imm == 0)) {
        trap = tcg_constant_i32((rs2_or_imm & mask) + TT_TRAP);
    } else {
        /* Compute rs1 + (rs2 or imm) at runtime, then mask and bias.  */
        trap = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(trap, gen_load_gpr(dc, rs1));
        if (imm) {
            tcg_gen_addi_i32(trap, trap, rs2_or_imm);
        } else {
            TCGv_i32 t2 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t2, gen_load_gpr(dc, rs2_or_imm));
            tcg_gen_add_i32(trap, trap, t2);
        }
        tcg_gen_andi_i32(trap, trap, mask);
        tcg_gen_addi_i32(trap, trap, TT_TRAP);
    }

    /* Trap always.  */
    if (cond == 8) {
        save_state(dc);
        gen_helper_raise_exception(tcg_env, trap);
        dc->base.is_jmp = DISAS_NORETURN;
        return true;
    }

    /* Conditional trap: branch to an out-of-line exception raise.  */
    flush_cond(dc);
    lab = delay_exceptionv(dc, trap);
    gen_compare(&cmp, cc, cond, dc);
    tcg_gen_brcond_tl(cmp.cond, cmp.c1, cmp.c2, lab);

    return advance_pc(dc);
}
3095 
3096 static bool trans_Tcc_r(DisasContext *dc, arg_Tcc_r *a)
3097 {
3098     if (avail_32(dc) && a->cc) {
3099         return false;
3100     }
3101     return do_tcc(dc, a->cond, a->cc, a->rs1, false, a->rs2);
3102 }
3103 
3104 static bool trans_Tcc_i_v7(DisasContext *dc, arg_Tcc_i_v7 *a)
3105 {
3106     if (avail_64(dc)) {
3107         return false;
3108     }
3109     return do_tcc(dc, a->cond, 0, a->rs1, true, a->i);
3110 }
3111 
3112 static bool trans_Tcc_i_v9(DisasContext *dc, arg_Tcc_i_v9 *a)
3113 {
3114     if (avail_32(dc)) {
3115         return false;
3116     }
3117     return do_tcc(dc, a->cond, a->cc, a->rs1, true, a->i);
3118 }
3119 
/*
 * Within disas_sparc_legacy only: bail out to the illegal_insn /
 * nfpu_insn labels when the cpu model lacks the named feature bit.
 */
#define CHECK_IU_FEATURE(dc, FEATURE)                      \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto illegal_insn;
#define CHECK_FPU_FEATURE(dc, FEATURE)                     \
    if (!((dc)->def->features & CPU_FEATURE_ ## FEATURE))  \
        goto nfpu_insn;
3126 
3127 /* before an instruction, dc->pc must be static */
3128 static void disas_sparc_legacy(DisasContext *dc, unsigned int insn)
3129 {
3130     unsigned int opc, rs1, rs2, rd;
3131     TCGv cpu_src1, cpu_src2;
3132     TCGv_i32 cpu_src1_32, cpu_src2_32, cpu_dst_32;
3133     TCGv_i64 cpu_src1_64, cpu_src2_64, cpu_dst_64;
3134     target_long simm;
3135 
3136     opc = GET_FIELD(insn, 0, 1);
3137     rd = GET_FIELD(insn, 2, 6);
3138 
3139     switch (opc) {
3140     case 0:
3141         goto illegal_insn; /* in decodetree */
3142     case 1:
3143         g_assert_not_reached(); /* in decodetree */
3144     case 2:                     /* FPU & Logical Operations */
3145         {
3146             unsigned int xop = GET_FIELD(insn, 7, 12);
3147             TCGv cpu_dst = tcg_temp_new();
3148             TCGv cpu_tmp0;
3149 
3150             if (xop == 0x28) {
3151                 rs1 = GET_FIELD(insn, 13, 17);
3152                 switch(rs1) {
3153                 case 0: /* rdy */
3154 #ifndef TARGET_SPARC64
3155                 case 0x01 ... 0x0e: /* undefined in the SPARCv8
3156                                        manual, rdy on the microSPARC
3157                                        II */
3158                 case 0x0f:          /* stbar in the SPARCv8 manual,
3159                                        rdy on the microSPARC II */
3160                 case 0x10 ... 0x1f: /* implementation-dependent in the
3161                                        SPARCv8 manual, rdy on the
3162                                        microSPARC II */
3163                     /* Read Asr17 */
3164                     if (rs1 == 0x11 && dc->def->features & CPU_FEATURE_ASR17) {
3165                         TCGv t = gen_dest_gpr(dc, rd);
3166                         /* Read Asr17 for a Leon3 monoprocessor */
3167                         tcg_gen_movi_tl(t, (1 << 8) | (dc->def->nwindows - 1));
3168                         gen_store_gpr(dc, rd, t);
3169                         break;
3170                     }
3171 #endif
3172                     gen_store_gpr(dc, rd, cpu_y);
3173                     break;
3174 #ifdef TARGET_SPARC64
3175                 case 0x2: /* V9 rdccr */
3176                     update_psr(dc);
3177                     gen_helper_rdccr(cpu_dst, tcg_env);
3178                     gen_store_gpr(dc, rd, cpu_dst);
3179                     break;
3180                 case 0x3: /* V9 rdasi */
3181                     tcg_gen_movi_tl(cpu_dst, dc->asi);
3182                     gen_store_gpr(dc, rd, cpu_dst);
3183                     break;
3184                 case 0x4: /* V9 rdtick */
3185                     {
3186                         TCGv_ptr r_tickptr;
3187                         TCGv_i32 r_const;
3188 
3189                         r_tickptr = tcg_temp_new_ptr();
3190                         r_const = tcg_constant_i32(dc->mem_idx);
3191                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3192                                        offsetof(CPUSPARCState, tick));
3193                         if (translator_io_start(&dc->base)) {
3194                             dc->base.is_jmp = DISAS_EXIT;
3195                         }
3196                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3197                                                   r_const);
3198                         gen_store_gpr(dc, rd, cpu_dst);
3199                     }
3200                     break;
3201                 case 0x5: /* V9 rdpc */
3202                     {
3203                         TCGv t = gen_dest_gpr(dc, rd);
3204                         if (unlikely(AM_CHECK(dc))) {
3205                             tcg_gen_movi_tl(t, dc->pc & 0xffffffffULL);
3206                         } else {
3207                             tcg_gen_movi_tl(t, dc->pc);
3208                         }
3209                         gen_store_gpr(dc, rd, t);
3210                     }
3211                     break;
3212                 case 0x6: /* V9 rdfprs */
3213                     tcg_gen_ext_i32_tl(cpu_dst, cpu_fprs);
3214                     gen_store_gpr(dc, rd, cpu_dst);
3215                     break;
3216                 case 0xf: /* V9 membar */
3217                     break; /* no effect */
3218                 case 0x13: /* Graphics Status */
3219                     if (gen_trap_ifnofpu(dc)) {
3220                         goto jmp_insn;
3221                     }
3222                     gen_store_gpr(dc, rd, cpu_gsr);
3223                     break;
3224                 case 0x16: /* Softint */
3225                     tcg_gen_ld32s_tl(cpu_dst, tcg_env,
3226                                      offsetof(CPUSPARCState, softint));
3227                     gen_store_gpr(dc, rd, cpu_dst);
3228                     break;
3229                 case 0x17: /* Tick compare */
3230                     gen_store_gpr(dc, rd, cpu_tick_cmpr);
3231                     break;
3232                 case 0x18: /* System tick */
3233                     {
3234                         TCGv_ptr r_tickptr;
3235                         TCGv_i32 r_const;
3236 
3237                         r_tickptr = tcg_temp_new_ptr();
3238                         r_const = tcg_constant_i32(dc->mem_idx);
3239                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3240                                        offsetof(CPUSPARCState, stick));
3241                         if (translator_io_start(&dc->base)) {
3242                             dc->base.is_jmp = DISAS_EXIT;
3243                         }
3244                         gen_helper_tick_get_count(cpu_dst, tcg_env, r_tickptr,
3245                                                   r_const);
3246                         gen_store_gpr(dc, rd, cpu_dst);
3247                     }
3248                     break;
3249                 case 0x19: /* System tick compare */
3250                     gen_store_gpr(dc, rd, cpu_stick_cmpr);
3251                     break;
3252                 case 0x1a: /* UltraSPARC-T1 Strand status */
3253                     /* XXX HYPV check maybe not enough, UA2005 & UA2007 describe
3254                      * this ASR as impl. dep
3255                      */
3256                     CHECK_IU_FEATURE(dc, HYPV);
3257                     {
3258                         TCGv t = gen_dest_gpr(dc, rd);
3259                         tcg_gen_movi_tl(t, 1UL);
3260                         gen_store_gpr(dc, rd, t);
3261                     }
3262                     break;
3263                 case 0x10: /* Performance Control */
3264                 case 0x11: /* Performance Instrumentation Counter */
3265                 case 0x12: /* Dispatch Control */
3266                 case 0x14: /* Softint set, WO */
3267                 case 0x15: /* Softint clear, WO */
3268 #endif
3269                 default:
3270                     goto illegal_insn;
3271                 }
3272 #if !defined(CONFIG_USER_ONLY)
3273             } else if (xop == 0x29) { /* rdpsr / UA2005 rdhpr */
3274 #ifndef TARGET_SPARC64
3275                 if (!supervisor(dc)) {
3276                     goto priv_insn;
3277                 }
3278                 update_psr(dc);
3279                 gen_helper_rdpsr(cpu_dst, tcg_env);
3280 #else
3281                 CHECK_IU_FEATURE(dc, HYPV);
3282                 if (!hypervisor(dc))
3283                     goto priv_insn;
3284                 rs1 = GET_FIELD(insn, 13, 17);
3285                 switch (rs1) {
3286                 case 0: // hpstate
3287                     tcg_gen_ld_i64(cpu_dst, tcg_env,
3288                                    offsetof(CPUSPARCState, hpstate));
3289                     break;
3290                 case 1: // htstate
3291                     // gen_op_rdhtstate();
3292                     break;
3293                 case 3: // hintp
3294                     tcg_gen_mov_tl(cpu_dst, cpu_hintp);
3295                     break;
3296                 case 5: // htba
3297                     tcg_gen_mov_tl(cpu_dst, cpu_htba);
3298                     break;
3299                 case 6: // hver
3300                     tcg_gen_mov_tl(cpu_dst, cpu_hver);
3301                     break;
3302                 case 31: // hstick_cmpr
3303                     tcg_gen_mov_tl(cpu_dst, cpu_hstick_cmpr);
3304                     break;
3305                 default:
3306                     goto illegal_insn;
3307                 }
3308 #endif
3309                 gen_store_gpr(dc, rd, cpu_dst);
3310                 break;
3311             } else if (xop == 0x2a) { /* rdwim / V9 rdpr */
3312                 if (!supervisor(dc)) {
3313                     goto priv_insn;
3314                 }
3315                 cpu_tmp0 = tcg_temp_new();
3316 #ifdef TARGET_SPARC64
3317                 rs1 = GET_FIELD(insn, 13, 17);
3318                 switch (rs1) {
3319                 case 0: // tpc
3320                     {
3321                         TCGv_ptr r_tsptr;
3322 
3323                         r_tsptr = tcg_temp_new_ptr();
3324                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3325                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3326                                       offsetof(trap_state, tpc));
3327                     }
3328                     break;
3329                 case 1: // tnpc
3330                     {
3331                         TCGv_ptr r_tsptr;
3332 
3333                         r_tsptr = tcg_temp_new_ptr();
3334                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3335                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3336                                       offsetof(trap_state, tnpc));
3337                     }
3338                     break;
3339                 case 2: // tstate
3340                     {
3341                         TCGv_ptr r_tsptr;
3342 
3343                         r_tsptr = tcg_temp_new_ptr();
3344                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3345                         tcg_gen_ld_tl(cpu_tmp0, r_tsptr,
3346                                       offsetof(trap_state, tstate));
3347                     }
3348                     break;
3349                 case 3: // tt
3350                     {
3351                         TCGv_ptr r_tsptr = tcg_temp_new_ptr();
3352 
3353                         gen_load_trap_state_at_tl(r_tsptr, tcg_env);
3354                         tcg_gen_ld32s_tl(cpu_tmp0, r_tsptr,
3355                                          offsetof(trap_state, tt));
3356                     }
3357                     break;
3358                 case 4: // tick
3359                     {
3360                         TCGv_ptr r_tickptr;
3361                         TCGv_i32 r_const;
3362 
3363                         r_tickptr = tcg_temp_new_ptr();
3364                         r_const = tcg_constant_i32(dc->mem_idx);
3365                         tcg_gen_ld_ptr(r_tickptr, tcg_env,
3366                                        offsetof(CPUSPARCState, tick));
3367                         if (translator_io_start(&dc->base)) {
3368                             dc->base.is_jmp = DISAS_EXIT;
3369                         }
3370                         gen_helper_tick_get_count(cpu_tmp0, tcg_env,
3371                                                   r_tickptr, r_const);
3372                     }
3373                     break;
3374                 case 5: // tba
3375                     tcg_gen_mov_tl(cpu_tmp0, cpu_tbr);
3376                     break;
3377                 case 6: // pstate
3378                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3379                                      offsetof(CPUSPARCState, pstate));
3380                     break;
3381                 case 7: // tl
3382                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3383                                      offsetof(CPUSPARCState, tl));
3384                     break;
3385                 case 8: // pil
3386                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3387                                      offsetof(CPUSPARCState, psrpil));
3388                     break;
3389                 case 9: // cwp
3390                     gen_helper_rdcwp(cpu_tmp0, tcg_env);
3391                     break;
3392                 case 10: // cansave
3393                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3394                                      offsetof(CPUSPARCState, cansave));
3395                     break;
3396                 case 11: // canrestore
3397                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3398                                      offsetof(CPUSPARCState, canrestore));
3399                     break;
3400                 case 12: // cleanwin
3401                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3402                                      offsetof(CPUSPARCState, cleanwin));
3403                     break;
3404                 case 13: // otherwin
3405                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3406                                      offsetof(CPUSPARCState, otherwin));
3407                     break;
3408                 case 14: // wstate
3409                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3410                                      offsetof(CPUSPARCState, wstate));
3411                     break;
3412                 case 16: // UA2005 gl
3413                     CHECK_IU_FEATURE(dc, GL);
3414                     tcg_gen_ld32s_tl(cpu_tmp0, tcg_env,
3415                                      offsetof(CPUSPARCState, gl));
3416                     break;
3417                 case 26: // UA2005 strand status
3418                     CHECK_IU_FEATURE(dc, HYPV);
3419                     if (!hypervisor(dc))
3420                         goto priv_insn;
3421                     tcg_gen_mov_tl(cpu_tmp0, cpu_ssr);
3422                     break;
3423                 case 31: // ver
3424                     tcg_gen_mov_tl(cpu_tmp0, cpu_ver);
3425                     break;
3426                 case 15: // fq
3427                 default:
3428                     goto illegal_insn;
3429                 }
3430 #else
3431                 tcg_gen_ext_i32_tl(cpu_tmp0, cpu_wim);
3432 #endif
3433                 gen_store_gpr(dc, rd, cpu_tmp0);
3434                 break;
3435 #endif
3436 #if defined(TARGET_SPARC64) || !defined(CONFIG_USER_ONLY)
3437             } else if (xop == 0x2b) { /* rdtbr / V9 flushw */
3438 #ifdef TARGET_SPARC64
3439                 gen_helper_flushw(tcg_env);
3440 #else
3441                 if (!supervisor(dc))
3442                     goto priv_insn;
3443                 gen_store_gpr(dc, rd, cpu_tbr);
3444 #endif
3445                 break;
3446 #endif
3447             } else if (xop == 0x34) {   /* FPU Operations */
3448                 if (gen_trap_ifnofpu(dc)) {
3449                     goto jmp_insn;
3450                 }
3451                 gen_op_clear_ieee_excp_and_FTT();
3452                 rs1 = GET_FIELD(insn, 13, 17);
3453                 rs2 = GET_FIELD(insn, 27, 31);
3454                 xop = GET_FIELD(insn, 18, 26);
3455 
3456                 switch (xop) {
3457                 case 0x1: /* fmovs */
3458                     cpu_src1_32 = gen_load_fpr_F(dc, rs2);
3459                     gen_store_fpr_F(dc, rd, cpu_src1_32);
3460                     break;
3461                 case 0x5: /* fnegs */
3462                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fnegs);
3463                     break;
3464                 case 0x9: /* fabss */
3465                     gen_ne_fop_FF(dc, rd, rs2, gen_helper_fabss);
3466                     break;
3467                 case 0x29: /* fsqrts */
3468                     gen_fop_FF(dc, rd, rs2, gen_helper_fsqrts);
3469                     break;
3470                 case 0x2a: /* fsqrtd */
3471                     gen_fop_DD(dc, rd, rs2, gen_helper_fsqrtd);
3472                     break;
3473                 case 0x2b: /* fsqrtq */
3474                     CHECK_FPU_FEATURE(dc, FLOAT128);
3475                     gen_fop_QQ(dc, rd, rs2, gen_helper_fsqrtq);
3476                     break;
3477                 case 0x41: /* fadds */
3478                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fadds);
3479                     break;
3480                 case 0x42: /* faddd */
3481                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_faddd);
3482                     break;
3483                 case 0x43: /* faddq */
3484                     CHECK_FPU_FEATURE(dc, FLOAT128);
3485                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_faddq);
3486                     break;
3487                 case 0x45: /* fsubs */
3488                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fsubs);
3489                     break;
3490                 case 0x46: /* fsubd */
3491                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fsubd);
3492                     break;
3493                 case 0x47: /* fsubq */
3494                     CHECK_FPU_FEATURE(dc, FLOAT128);
3495                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fsubq);
3496                     break;
3497                 case 0x49: /* fmuls */
3498                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fmuls);
3499                     break;
3500                 case 0x4a: /* fmuld */
3501                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld);
3502                     break;
3503                 case 0x4b: /* fmulq */
3504                     CHECK_FPU_FEATURE(dc, FLOAT128);
3505                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fmulq);
3506                     break;
3507                 case 0x4d: /* fdivs */
3508                     gen_fop_FFF(dc, rd, rs1, rs2, gen_helper_fdivs);
3509                     break;
3510                 case 0x4e: /* fdivd */
3511                     gen_fop_DDD(dc, rd, rs1, rs2, gen_helper_fdivd);
3512                     break;
3513                 case 0x4f: /* fdivq */
3514                     CHECK_FPU_FEATURE(dc, FLOAT128);
3515                     gen_fop_QQQ(dc, rd, rs1, rs2, gen_helper_fdivq);
3516                     break;
3517                 case 0x69: /* fsmuld */
3518                     CHECK_FPU_FEATURE(dc, FSMULD);
3519                     gen_fop_DFF(dc, rd, rs1, rs2, gen_helper_fsmuld);
3520                     break;
3521                 case 0x6e: /* fdmulq */
3522                     CHECK_FPU_FEATURE(dc, FLOAT128);
3523                     gen_fop_QDD(dc, rd, rs1, rs2, gen_helper_fdmulq);
3524                     break;
3525                 case 0xc4: /* fitos */
3526                     gen_fop_FF(dc, rd, rs2, gen_helper_fitos);
3527                     break;
3528                 case 0xc6: /* fdtos */
3529                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtos);
3530                     break;
3531                 case 0xc7: /* fqtos */
3532                     CHECK_FPU_FEATURE(dc, FLOAT128);
3533                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtos);
3534                     break;
3535                 case 0xc8: /* fitod */
3536                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fitod);
3537                     break;
3538                 case 0xc9: /* fstod */
3539                     gen_ne_fop_DF(dc, rd, rs2, gen_helper_fstod);
3540                     break;
3541                 case 0xcb: /* fqtod */
3542                     CHECK_FPU_FEATURE(dc, FLOAT128);
3543                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtod);
3544                     break;
3545                 case 0xcc: /* fitoq */
3546                     CHECK_FPU_FEATURE(dc, FLOAT128);
3547                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fitoq);
3548                     break;
3549                 case 0xcd: /* fstoq */
3550                     CHECK_FPU_FEATURE(dc, FLOAT128);
3551                     gen_ne_fop_QF(dc, rd, rs2, gen_helper_fstoq);
3552                     break;
3553                 case 0xce: /* fdtoq */
3554                     CHECK_FPU_FEATURE(dc, FLOAT128);
3555                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fdtoq);
3556                     break;
3557                 case 0xd1: /* fstoi */
3558                     gen_fop_FF(dc, rd, rs2, gen_helper_fstoi);
3559                     break;
3560                 case 0xd2: /* fdtoi */
3561                     gen_fop_FD(dc, rd, rs2, gen_helper_fdtoi);
3562                     break;
3563                 case 0xd3: /* fqtoi */
3564                     CHECK_FPU_FEATURE(dc, FLOAT128);
3565                     gen_fop_FQ(dc, rd, rs2, gen_helper_fqtoi);
3566                     break;
3567 #ifdef TARGET_SPARC64
3568                 case 0x2: /* V9 fmovd */
3569                     cpu_src1_64 = gen_load_fpr_D(dc, rs2);
3570                     gen_store_fpr_D(dc, rd, cpu_src1_64);
3571                     break;
3572                 case 0x3: /* V9 fmovq */
3573                     CHECK_FPU_FEATURE(dc, FLOAT128);
3574                     gen_move_Q(dc, rd, rs2);
3575                     break;
3576                 case 0x6: /* V9 fnegd */
3577                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fnegd);
3578                     break;
3579                 case 0x7: /* V9 fnegq */
3580                     CHECK_FPU_FEATURE(dc, FLOAT128);
3581                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fnegq);
3582                     break;
3583                 case 0xa: /* V9 fabsd */
3584                     gen_ne_fop_DD(dc, rd, rs2, gen_helper_fabsd);
3585                     break;
3586                 case 0xb: /* V9 fabsq */
3587                     CHECK_FPU_FEATURE(dc, FLOAT128);
3588                     gen_ne_fop_QQ(dc, rd, rs2, gen_helper_fabsq);
3589                     break;
3590                 case 0x81: /* V9 fstox */
3591                     gen_fop_DF(dc, rd, rs2, gen_helper_fstox);
3592                     break;
3593                 case 0x82: /* V9 fdtox */
3594                     gen_fop_DD(dc, rd, rs2, gen_helper_fdtox);
3595                     break;
3596                 case 0x83: /* V9 fqtox */
3597                     CHECK_FPU_FEATURE(dc, FLOAT128);
3598                     gen_fop_DQ(dc, rd, rs2, gen_helper_fqtox);
3599                     break;
3600                 case 0x84: /* V9 fxtos */
3601                     gen_fop_FD(dc, rd, rs2, gen_helper_fxtos);
3602                     break;
3603                 case 0x88: /* V9 fxtod */
3604                     gen_fop_DD(dc, rd, rs2, gen_helper_fxtod);
3605                     break;
3606                 case 0x8c: /* V9 fxtoq */
3607                     CHECK_FPU_FEATURE(dc, FLOAT128);
3608                     gen_ne_fop_QD(dc, rd, rs2, gen_helper_fxtoq);
3609                     break;
3610 #endif
3611                 default:
3612                     goto illegal_insn;
3613                 }
3614             } else if (xop == 0x35) {   /* FPU Operations */
3615 #ifdef TARGET_SPARC64
3616                 int cond;
3617 #endif
3618                 if (gen_trap_ifnofpu(dc)) {
3619                     goto jmp_insn;
3620                 }
3621                 gen_op_clear_ieee_excp_and_FTT();
3622                 rs1 = GET_FIELD(insn, 13, 17);
3623                 rs2 = GET_FIELD(insn, 27, 31);
3624                 xop = GET_FIELD(insn, 18, 26);
3625 
3626 #ifdef TARGET_SPARC64
3627 #define FMOVR(sz)                                                  \
3628                 do {                                               \
3629                     DisasCompare cmp;                              \
3630                     cond = GET_FIELD_SP(insn, 10, 12);             \
3631                     cpu_src1 = get_src1(dc, insn);                 \
3632                     gen_compare_reg(&cmp, cond, cpu_src1);         \
3633                     gen_fmov##sz(dc, &cmp, rd, rs2);               \
3634                 } while (0)
3635 
3636                 if ((xop & 0x11f) == 0x005) { /* V9 fmovsr */
3637                     FMOVR(s);
3638                     break;
3639                 } else if ((xop & 0x11f) == 0x006) { // V9 fmovdr
3640                     FMOVR(d);
3641                     break;
3642                 } else if ((xop & 0x11f) == 0x007) { // V9 fmovqr
3643                     CHECK_FPU_FEATURE(dc, FLOAT128);
3644                     FMOVR(q);
3645                     break;
3646                 }
3647 #undef FMOVR
3648 #endif
3649                 switch (xop) {
3650 #ifdef TARGET_SPARC64
3651 #define FMOVCC(fcc, sz)                                                 \
3652                     do {                                                \
3653                         DisasCompare cmp;                               \
3654                         cond = GET_FIELD_SP(insn, 14, 17);              \
3655                         gen_fcompare(&cmp, fcc, cond);                  \
3656                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3657                     } while (0)
3658 
3659                     case 0x001: /* V9 fmovscc %fcc0 */
3660                         FMOVCC(0, s);
3661                         break;
3662                     case 0x002: /* V9 fmovdcc %fcc0 */
3663                         FMOVCC(0, d);
3664                         break;
3665                     case 0x003: /* V9 fmovqcc %fcc0 */
3666                         CHECK_FPU_FEATURE(dc, FLOAT128);
3667                         FMOVCC(0, q);
3668                         break;
3669                     case 0x041: /* V9 fmovscc %fcc1 */
3670                         FMOVCC(1, s);
3671                         break;
3672                     case 0x042: /* V9 fmovdcc %fcc1 */
3673                         FMOVCC(1, d);
3674                         break;
3675                     case 0x043: /* V9 fmovqcc %fcc1 */
3676                         CHECK_FPU_FEATURE(dc, FLOAT128);
3677                         FMOVCC(1, q);
3678                         break;
3679                     case 0x081: /* V9 fmovscc %fcc2 */
3680                         FMOVCC(2, s);
3681                         break;
3682                     case 0x082: /* V9 fmovdcc %fcc2 */
3683                         FMOVCC(2, d);
3684                         break;
3685                     case 0x083: /* V9 fmovqcc %fcc2 */
3686                         CHECK_FPU_FEATURE(dc, FLOAT128);
3687                         FMOVCC(2, q);
3688                         break;
3689                     case 0x0c1: /* V9 fmovscc %fcc3 */
3690                         FMOVCC(3, s);
3691                         break;
3692                     case 0x0c2: /* V9 fmovdcc %fcc3 */
3693                         FMOVCC(3, d);
3694                         break;
3695                     case 0x0c3: /* V9 fmovqcc %fcc3 */
3696                         CHECK_FPU_FEATURE(dc, FLOAT128);
3697                         FMOVCC(3, q);
3698                         break;
3699 #undef FMOVCC
3700 #define FMOVCC(xcc, sz)                                                 \
3701                     do {                                                \
3702                         DisasCompare cmp;                               \
3703                         cond = GET_FIELD_SP(insn, 14, 17);              \
3704                         gen_compare(&cmp, xcc, cond, dc);               \
3705                         gen_fmov##sz(dc, &cmp, rd, rs2);                \
3706                     } while (0)
3707 
3708                     case 0x101: /* V9 fmovscc %icc */
3709                         FMOVCC(0, s);
3710                         break;
3711                     case 0x102: /* V9 fmovdcc %icc */
3712                         FMOVCC(0, d);
3713                         break;
3714                     case 0x103: /* V9 fmovqcc %icc */
3715                         CHECK_FPU_FEATURE(dc, FLOAT128);
3716                         FMOVCC(0, q);
3717                         break;
3718                     case 0x181: /* V9 fmovscc %xcc */
3719                         FMOVCC(1, s);
3720                         break;
3721                     case 0x182: /* V9 fmovdcc %xcc */
3722                         FMOVCC(1, d);
3723                         break;
3724                     case 0x183: /* V9 fmovqcc %xcc */
3725                         CHECK_FPU_FEATURE(dc, FLOAT128);
3726                         FMOVCC(1, q);
3727                         break;
3728 #undef FMOVCC
3729 #endif
3730                     case 0x51: /* fcmps, V9 %fcc */
3731                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3732                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3733                         gen_op_fcmps(rd & 3, cpu_src1_32, cpu_src2_32);
3734                         break;
3735                     case 0x52: /* fcmpd, V9 %fcc */
3736                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3737                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3738                         gen_op_fcmpd(rd & 3, cpu_src1_64, cpu_src2_64);
3739                         break;
3740                     case 0x53: /* fcmpq, V9 %fcc */
3741                         CHECK_FPU_FEATURE(dc, FLOAT128);
3742                         gen_op_load_fpr_QT0(QFPREG(rs1));
3743                         gen_op_load_fpr_QT1(QFPREG(rs2));
3744                         gen_op_fcmpq(rd & 3);
3745                         break;
3746                     case 0x55: /* fcmpes, V9 %fcc */
3747                         cpu_src1_32 = gen_load_fpr_F(dc, rs1);
3748                         cpu_src2_32 = gen_load_fpr_F(dc, rs2);
3749                         gen_op_fcmpes(rd & 3, cpu_src1_32, cpu_src2_32);
3750                         break;
3751                     case 0x56: /* fcmped, V9 %fcc */
3752                         cpu_src1_64 = gen_load_fpr_D(dc, rs1);
3753                         cpu_src2_64 = gen_load_fpr_D(dc, rs2);
3754                         gen_op_fcmped(rd & 3, cpu_src1_64, cpu_src2_64);
3755                         break;
3756                     case 0x57: /* fcmpeq, V9 %fcc */
3757                         CHECK_FPU_FEATURE(dc, FLOAT128);
3758                         gen_op_load_fpr_QT0(QFPREG(rs1));
3759                         gen_op_load_fpr_QT1(QFPREG(rs2));
3760                         gen_op_fcmpeq(rd & 3);
3761                         break;
3762                     default:
3763                         goto illegal_insn;
3764                 }
3765             } else if (xop == 0x2) {
3766                 TCGv dst = gen_dest_gpr(dc, rd);
3767                 rs1 = GET_FIELD(insn, 13, 17);
3768                 if (rs1 == 0) {
3769                     /* clr/mov shortcut : or %g0, x, y -> mov x, y */
3770                     if (IS_IMM) {       /* immediate */
3771                         simm = GET_FIELDs(insn, 19, 31);
3772                         tcg_gen_movi_tl(dst, simm);
3773                         gen_store_gpr(dc, rd, dst);
3774                     } else {            /* register */
3775                         rs2 = GET_FIELD(insn, 27, 31);
3776                         if (rs2 == 0) {
3777                             tcg_gen_movi_tl(dst, 0);
3778                             gen_store_gpr(dc, rd, dst);
3779                         } else {
3780                             cpu_src2 = gen_load_gpr(dc, rs2);
3781                             gen_store_gpr(dc, rd, cpu_src2);
3782                         }
3783                     }
3784                 } else {
3785                     cpu_src1 = get_src1(dc, insn);
3786                     if (IS_IMM) {       /* immediate */
3787                         simm = GET_FIELDs(insn, 19, 31);
3788                         tcg_gen_ori_tl(dst, cpu_src1, simm);
3789                         gen_store_gpr(dc, rd, dst);
3790                     } else {            /* register */
3791                         rs2 = GET_FIELD(insn, 27, 31);
3792                         if (rs2 == 0) {
3793                             /* mov shortcut:  or x, %g0, y -> mov x, y */
3794                             gen_store_gpr(dc, rd, cpu_src1);
3795                         } else {
3796                             cpu_src2 = gen_load_gpr(dc, rs2);
3797                             tcg_gen_or_tl(dst, cpu_src1, cpu_src2);
3798                             gen_store_gpr(dc, rd, dst);
3799                         }
3800                     }
3801                 }
3802 #ifdef TARGET_SPARC64
3803             } else if (xop == 0x25) { /* sll, V9 sllx */
3804                 cpu_src1 = get_src1(dc, insn);
3805                 if (IS_IMM) {   /* immediate */
3806                     simm = GET_FIELDs(insn, 20, 31);
3807                     if (insn & (1 << 12)) {
3808                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x3f);
3809                     } else {
3810                         tcg_gen_shli_i64(cpu_dst, cpu_src1, simm & 0x1f);
3811                     }
3812                 } else {                /* register */
3813                     rs2 = GET_FIELD(insn, 27, 31);
3814                     cpu_src2 = gen_load_gpr(dc, rs2);
3815                     cpu_tmp0 = tcg_temp_new();
3816                     if (insn & (1 << 12)) {
3817                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3818                     } else {
3819                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3820                     }
3821                     tcg_gen_shl_i64(cpu_dst, cpu_src1, cpu_tmp0);
3822                 }
3823                 gen_store_gpr(dc, rd, cpu_dst);
3824             } else if (xop == 0x26) { /* srl, V9 srlx */
3825                 cpu_src1 = get_src1(dc, insn);
3826                 if (IS_IMM) {   /* immediate */
3827                     simm = GET_FIELDs(insn, 20, 31);
3828                     if (insn & (1 << 12)) {
3829                         tcg_gen_shri_i64(cpu_dst, cpu_src1, simm & 0x3f);
3830                     } else {
3831                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3832                         tcg_gen_shri_i64(cpu_dst, cpu_dst, simm & 0x1f);
3833                     }
3834                 } else {                /* register */
3835                     rs2 = GET_FIELD(insn, 27, 31);
3836                     cpu_src2 = gen_load_gpr(dc, rs2);
3837                     cpu_tmp0 = tcg_temp_new();
3838                     if (insn & (1 << 12)) {
3839                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3840                         tcg_gen_shr_i64(cpu_dst, cpu_src1, cpu_tmp0);
3841                     } else {
3842                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3843                         tcg_gen_andi_i64(cpu_dst, cpu_src1, 0xffffffffULL);
3844                         tcg_gen_shr_i64(cpu_dst, cpu_dst, cpu_tmp0);
3845                     }
3846                 }
3847                 gen_store_gpr(dc, rd, cpu_dst);
3848             } else if (xop == 0x27) { /* sra, V9 srax */
3849                 cpu_src1 = get_src1(dc, insn);
3850                 if (IS_IMM) {   /* immediate */
3851                     simm = GET_FIELDs(insn, 20, 31);
3852                     if (insn & (1 << 12)) {
3853                         tcg_gen_sari_i64(cpu_dst, cpu_src1, simm & 0x3f);
3854                     } else {
3855                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3856                         tcg_gen_sari_i64(cpu_dst, cpu_dst, simm & 0x1f);
3857                     }
3858                 } else {                /* register */
3859                     rs2 = GET_FIELD(insn, 27, 31);
3860                     cpu_src2 = gen_load_gpr(dc, rs2);
3861                     cpu_tmp0 = tcg_temp_new();
3862                     if (insn & (1 << 12)) {
3863                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x3f);
3864                         tcg_gen_sar_i64(cpu_dst, cpu_src1, cpu_tmp0);
3865                     } else {
3866                         tcg_gen_andi_i64(cpu_tmp0, cpu_src2, 0x1f);
3867                         tcg_gen_ext32s_i64(cpu_dst, cpu_src1);
3868                         tcg_gen_sar_i64(cpu_dst, cpu_dst, cpu_tmp0);
3869                     }
3870                 }
3871                 gen_store_gpr(dc, rd, cpu_dst);
3872 #endif
3873             } else if (xop < 0x36) {
3874                 if (xop < 0x20) {
3875                     cpu_src1 = get_src1(dc, insn);
3876                     cpu_src2 = get_src2(dc, insn);
3877                     switch (xop & ~0x10) {
3878                     case 0x0: /* add */
3879                         if (xop & 0x10) {
3880                             gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
3881                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
3882                             dc->cc_op = CC_OP_ADD;
3883                         } else {
3884                             tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
3885                         }
3886                         break;
3887                     case 0x1: /* and */
3888                         tcg_gen_and_tl(cpu_dst, cpu_src1, cpu_src2);
3889                         if (xop & 0x10) {
3890                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3891                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3892                             dc->cc_op = CC_OP_LOGIC;
3893                         }
3894                         break;
3895                     case 0x2: /* or */
3896                         tcg_gen_or_tl(cpu_dst, cpu_src1, cpu_src2);
3897                         if (xop & 0x10) {
3898                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3899                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3900                             dc->cc_op = CC_OP_LOGIC;
3901                         }
3902                         break;
3903                     case 0x3: /* xor */
3904                         tcg_gen_xor_tl(cpu_dst, cpu_src1, cpu_src2);
3905                         if (xop & 0x10) {
3906                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3907                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3908                             dc->cc_op = CC_OP_LOGIC;
3909                         }
3910                         break;
3911                     case 0x4: /* sub */
3912                         if (xop & 0x10) {
3913                             gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
3914                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_SUB);
3915                             dc->cc_op = CC_OP_SUB;
3916                         } else {
3917                             tcg_gen_sub_tl(cpu_dst, cpu_src1, cpu_src2);
3918                         }
3919                         break;
3920                     case 0x5: /* andn */
3921                         tcg_gen_andc_tl(cpu_dst, cpu_src1, cpu_src2);
3922                         if (xop & 0x10) {
3923                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3924                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3925                             dc->cc_op = CC_OP_LOGIC;
3926                         }
3927                         break;
3928                     case 0x6: /* orn */
3929                         tcg_gen_orc_tl(cpu_dst, cpu_src1, cpu_src2);
3930                         if (xop & 0x10) {
3931                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3932                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3933                             dc->cc_op = CC_OP_LOGIC;
3934                         }
3935                         break;
3936                     case 0x7: /* xorn */
3937                         tcg_gen_eqv_tl(cpu_dst, cpu_src1, cpu_src2);
3938                         if (xop & 0x10) {
3939                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3940                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3941                             dc->cc_op = CC_OP_LOGIC;
3942                         }
3943                         break;
3944                     case 0x8: /* addx, V9 addc */
3945                         gen_op_addx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3946                                         (xop & 0x10));
3947                         break;
3948 #ifdef TARGET_SPARC64
3949                     case 0x9: /* V9 mulx */
3950                         tcg_gen_mul_i64(cpu_dst, cpu_src1, cpu_src2);
3951                         break;
3952 #endif
3953                     case 0xa: /* umul */
3954                         CHECK_IU_FEATURE(dc, MUL);
3955                         gen_op_umul(cpu_dst, cpu_src1, cpu_src2);
3956                         if (xop & 0x10) {
3957                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3958                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3959                             dc->cc_op = CC_OP_LOGIC;
3960                         }
3961                         break;
3962                     case 0xb: /* smul */
3963                         CHECK_IU_FEATURE(dc, MUL);
3964                         gen_op_smul(cpu_dst, cpu_src1, cpu_src2);
3965                         if (xop & 0x10) {
3966                             tcg_gen_mov_tl(cpu_cc_dst, cpu_dst);
3967                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_LOGIC);
3968                             dc->cc_op = CC_OP_LOGIC;
3969                         }
3970                         break;
3971                     case 0xc: /* subx, V9 subc */
3972                         gen_op_subx_int(dc, cpu_dst, cpu_src1, cpu_src2,
3973                                         (xop & 0x10));
3974                         break;
3975 #ifdef TARGET_SPARC64
3976                     case 0xd: /* V9 udivx */
3977                         gen_helper_udivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
3978                         break;
3979 #endif
3980                     case 0xe: /* udiv */
3981                         CHECK_IU_FEATURE(dc, DIV);
3982                         if (xop & 0x10) {
3983                             gen_helper_udiv_cc(cpu_dst, tcg_env, cpu_src1,
3984                                                cpu_src2);
3985                             dc->cc_op = CC_OP_DIV;
3986                         } else {
3987                             gen_helper_udiv(cpu_dst, tcg_env, cpu_src1,
3988                                             cpu_src2);
3989                         }
3990                         break;
3991                     case 0xf: /* sdiv */
3992                         CHECK_IU_FEATURE(dc, DIV);
3993                         if (xop & 0x10) {
3994                             gen_helper_sdiv_cc(cpu_dst, tcg_env, cpu_src1,
3995                                                cpu_src2);
3996                             dc->cc_op = CC_OP_DIV;
3997                         } else {
3998                             gen_helper_sdiv(cpu_dst, tcg_env, cpu_src1,
3999                                             cpu_src2);
4000                         }
4001                         break;
4002                     default:
4003                         goto illegal_insn;
4004                     }
4005                     gen_store_gpr(dc, rd, cpu_dst);
4006                 } else {
4007                     cpu_src1 = get_src1(dc, insn);
4008                     cpu_src2 = get_src2(dc, insn);
4009                     switch (xop) {
4010                     case 0x20: /* taddcc */
4011                         gen_op_add_cc(cpu_dst, cpu_src1, cpu_src2);
4012                         gen_store_gpr(dc, rd, cpu_dst);
4013                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TADD);
4014                         dc->cc_op = CC_OP_TADD;
4015                         break;
4016                     case 0x21: /* tsubcc */
4017                         gen_op_sub_cc(cpu_dst, cpu_src1, cpu_src2);
4018                         gen_store_gpr(dc, rd, cpu_dst);
4019                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_TSUB);
4020                         dc->cc_op = CC_OP_TSUB;
4021                         break;
4022                     case 0x22: /* taddcctv */
4023                         gen_helper_taddcctv(cpu_dst, tcg_env,
4024                                             cpu_src1, cpu_src2);
4025                         gen_store_gpr(dc, rd, cpu_dst);
4026                         dc->cc_op = CC_OP_TADDTV;
4027                         break;
4028                     case 0x23: /* tsubcctv */
4029                         gen_helper_tsubcctv(cpu_dst, tcg_env,
4030                                             cpu_src1, cpu_src2);
4031                         gen_store_gpr(dc, rd, cpu_dst);
4032                         dc->cc_op = CC_OP_TSUBTV;
4033                         break;
4034                     case 0x24: /* mulscc */
4035                         update_psr(dc);
4036                         gen_op_mulscc(cpu_dst, cpu_src1, cpu_src2);
4037                         gen_store_gpr(dc, rd, cpu_dst);
4038                         tcg_gen_movi_i32(cpu_cc_op, CC_OP_ADD);
4039                         dc->cc_op = CC_OP_ADD;
4040                         break;
4041 #ifndef TARGET_SPARC64
4042                     case 0x25:  /* sll */
4043                         if (IS_IMM) { /* immediate */
4044                             simm = GET_FIELDs(insn, 20, 31);
4045                             tcg_gen_shli_tl(cpu_dst, cpu_src1, simm & 0x1f);
4046                         } else { /* register */
4047                             cpu_tmp0 = tcg_temp_new();
4048                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4049                             tcg_gen_shl_tl(cpu_dst, cpu_src1, cpu_tmp0);
4050                         }
4051                         gen_store_gpr(dc, rd, cpu_dst);
4052                         break;
4053                     case 0x26:  /* srl */
4054                         if (IS_IMM) { /* immediate */
4055                             simm = GET_FIELDs(insn, 20, 31);
4056                             tcg_gen_shri_tl(cpu_dst, cpu_src1, simm & 0x1f);
4057                         } else { /* register */
4058                             cpu_tmp0 = tcg_temp_new();
4059                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4060                             tcg_gen_shr_tl(cpu_dst, cpu_src1, cpu_tmp0);
4061                         }
4062                         gen_store_gpr(dc, rd, cpu_dst);
4063                         break;
4064                     case 0x27:  /* sra */
4065                         if (IS_IMM) { /* immediate */
4066                             simm = GET_FIELDs(insn, 20, 31);
4067                             tcg_gen_sari_tl(cpu_dst, cpu_src1, simm & 0x1f);
4068                         } else { /* register */
4069                             cpu_tmp0 = tcg_temp_new();
4070                             tcg_gen_andi_tl(cpu_tmp0, cpu_src2, 0x1f);
4071                             tcg_gen_sar_tl(cpu_dst, cpu_src1, cpu_tmp0);
4072                         }
4073                         gen_store_gpr(dc, rd, cpu_dst);
4074                         break;
4075 #endif
4076                     case 0x30:
4077                         {
4078                             cpu_tmp0 = tcg_temp_new();
4079                             switch(rd) {
4080                             case 0: /* wry */
4081                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4082                                 tcg_gen_andi_tl(cpu_y, cpu_tmp0, 0xffffffff);
4083                                 break;
4084 #ifndef TARGET_SPARC64
4085                             case 0x01 ... 0x0f: /* undefined in the
4086                                                    SPARCv8 manual, nop
4087                                                    on the microSPARC
4088                                                    II */
4089                             case 0x10 ... 0x1f: /* implementation-dependent
4090                                                    in the SPARCv8
4091                                                    manual, nop on the
4092                                                    microSPARC II */
4093                                 if ((rd == 0x13) && (dc->def->features &
4094                                                      CPU_FEATURE_POWERDOWN)) {
4095                                     /* LEON3 power-down */
4096                                     save_state(dc);
4097                                     gen_helper_power_down(tcg_env);
4098                                 }
4099                                 break;
4100 #else
4101                             case 0x2: /* V9 wrccr */
4102                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4103                                 gen_helper_wrccr(tcg_env, cpu_tmp0);
4104                                 tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4105                                 dc->cc_op = CC_OP_FLAGS;
4106                                 break;
4107                             case 0x3: /* V9 wrasi */
4108                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4109                                 tcg_gen_andi_tl(cpu_tmp0, cpu_tmp0, 0xff);
4110                                 tcg_gen_st32_tl(cpu_tmp0, tcg_env,
4111                                                 offsetof(CPUSPARCState, asi));
4112                                 /*
4113                                  * End TB to notice changed ASI.
4114                                  * TODO: Could notice src1 = %g0 and IS_IMM,
4115                                  * update DisasContext and not exit the TB.
4116                                  */
4117                                 save_state(dc);
4118                                 gen_op_next_insn();
4119                                 tcg_gen_lookup_and_goto_ptr();
4120                                 dc->base.is_jmp = DISAS_NORETURN;
4121                                 break;
4122                             case 0x6: /* V9 wrfprs */
4123                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4124                                 tcg_gen_trunc_tl_i32(cpu_fprs, cpu_tmp0);
4125                                 dc->fprs_dirty = 0;
4126                                 save_state(dc);
4127                                 gen_op_next_insn();
4128                                 tcg_gen_exit_tb(NULL, 0);
4129                                 dc->base.is_jmp = DISAS_NORETURN;
4130                                 break;
4131                             case 0xf: /* V9 sir, nop if user */
4132 #if !defined(CONFIG_USER_ONLY)
4133                                 if (supervisor(dc)) {
4134                                     ; // XXX
4135                                 }
4136 #endif
4137                                 break;
4138                             case 0x13: /* Graphics Status */
4139                                 if (gen_trap_ifnofpu(dc)) {
4140                                     goto jmp_insn;
4141                                 }
4142                                 tcg_gen_xor_tl(cpu_gsr, cpu_src1, cpu_src2);
4143                                 break;
4144                             case 0x14: /* Softint set */
4145                                 if (!supervisor(dc))
4146                                     goto illegal_insn;
4147                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4148                                 gen_helper_set_softint(tcg_env, cpu_tmp0);
4149                                 break;
4150                             case 0x15: /* Softint clear */
4151                                 if (!supervisor(dc))
4152                                     goto illegal_insn;
4153                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4154                                 gen_helper_clear_softint(tcg_env, cpu_tmp0);
4155                                 break;
4156                             case 0x16: /* Softint write */
4157                                 if (!supervisor(dc))
4158                                     goto illegal_insn;
4159                                 tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4160                                 gen_helper_write_softint(tcg_env, cpu_tmp0);
4161                                 break;
4162                             case 0x17: /* Tick compare */
4163 #if !defined(CONFIG_USER_ONLY)
4164                                 if (!supervisor(dc))
4165                                     goto illegal_insn;
4166 #endif
4167                                 {
4168                                     TCGv_ptr r_tickptr;
4169 
4170                                     tcg_gen_xor_tl(cpu_tick_cmpr, cpu_src1,
4171                                                    cpu_src2);
4172                                     r_tickptr = tcg_temp_new_ptr();
4173                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4174                                                    offsetof(CPUSPARCState, tick));
4175                                     translator_io_start(&dc->base);
4176                                     gen_helper_tick_set_limit(r_tickptr,
4177                                                               cpu_tick_cmpr);
4178                                     /* End TB to handle timer interrupt */
4179                                     dc->base.is_jmp = DISAS_EXIT;
4180                                 }
4181                                 break;
4182                             case 0x18: /* System tick */
4183 #if !defined(CONFIG_USER_ONLY)
4184                                 if (!supervisor(dc))
4185                                     goto illegal_insn;
4186 #endif
4187                                 {
4188                                     TCGv_ptr r_tickptr;
4189 
4190                                     tcg_gen_xor_tl(cpu_tmp0, cpu_src1,
4191                                                    cpu_src2);
4192                                     r_tickptr = tcg_temp_new_ptr();
4193                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4194                                                    offsetof(CPUSPARCState, stick));
4195                                     translator_io_start(&dc->base);
4196                                     gen_helper_tick_set_count(r_tickptr,
4197                                                               cpu_tmp0);
4198                                     /* End TB to handle timer interrupt */
4199                                     dc->base.is_jmp = DISAS_EXIT;
4200                                 }
4201                                 break;
4202                             case 0x19: /* System tick compare */
4203 #if !defined(CONFIG_USER_ONLY)
4204                                 if (!supervisor(dc))
4205                                     goto illegal_insn;
4206 #endif
4207                                 {
4208                                     TCGv_ptr r_tickptr;
4209 
4210                                     tcg_gen_xor_tl(cpu_stick_cmpr, cpu_src1,
4211                                                    cpu_src2);
4212                                     r_tickptr = tcg_temp_new_ptr();
4213                                     tcg_gen_ld_ptr(r_tickptr, tcg_env,
4214                                                    offsetof(CPUSPARCState, stick));
4215                                     translator_io_start(&dc->base);
4216                                     gen_helper_tick_set_limit(r_tickptr,
4217                                                               cpu_stick_cmpr);
4218                                     /* End TB to handle timer interrupt */
4219                                     dc->base.is_jmp = DISAS_EXIT;
4220                                 }
4221                                 break;
4222 
4223                             case 0x10: /* Performance Control */
4224                             case 0x11: /* Performance Instrumentation
4225                                           Counter */
4226                             case 0x12: /* Dispatch Control */
4227 #endif
4228                             default:
4229                                 goto illegal_insn;
4230                             }
4231                         }
4232                         break;
4233 #if !defined(CONFIG_USER_ONLY)
4234                     case 0x31: /* wrpsr, V9 saved, restored */
4235                         {
4236                             if (!supervisor(dc))
4237                                 goto priv_insn;
4238 #ifdef TARGET_SPARC64
4239                             switch (rd) {
4240                             case 0:
4241                                 gen_helper_saved(tcg_env);
4242                                 break;
4243                             case 1:
4244                                 gen_helper_restored(tcg_env);
4245                                 break;
4246                             case 2: /* UA2005 allclean */
4247                             case 3: /* UA2005 otherw */
4248                             case 4: /* UA2005 normalw */
4249                             case 5: /* UA2005 invalw */
4250                                 // XXX
4251                             default:
4252                                 goto illegal_insn;
4253                             }
4254 #else
4255                             cpu_tmp0 = tcg_temp_new();
4256                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4257                             gen_helper_wrpsr(tcg_env, cpu_tmp0);
4258                             tcg_gen_movi_i32(cpu_cc_op, CC_OP_FLAGS);
4259                             dc->cc_op = CC_OP_FLAGS;
4260                             save_state(dc);
4261                             gen_op_next_insn();
4262                             tcg_gen_exit_tb(NULL, 0);
4263                             dc->base.is_jmp = DISAS_NORETURN;
4264 #endif
4265                         }
4266                         break;
4267                     case 0x32: /* wrwim, V9 wrpr */
4268                         {
4269                             if (!supervisor(dc))
4270                                 goto priv_insn;
4271                             cpu_tmp0 = tcg_temp_new();
4272                             tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
4273 #ifdef TARGET_SPARC64
4274                             switch (rd) {
4275                             case 0: // tpc
4276                                 {
4277                                     TCGv_ptr r_tsptr;
4278 
4279                                     r_tsptr = tcg_temp_new_ptr();
4280                                     gen_load_trap_state_at_tl(r_tsptr, tcg_env);
4281                                     tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tpc));
                                }
                                break;
                            /* Trap-state registers (tnpc/tstate/tt) are
                               written through a pointer to the trap_state
                               entry selected by the current trap level.  */
                            case 1: // tnpc
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state, tnpc));
                                }
                                break;
                            case 2: // tstate
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    tcg_gen_st_tl(cpu_tmp0, r_tsptr,
                                                  offsetof(trap_state,
                                                           tstate));
                                }
                                break;
                            case 3: // tt
                                {
                                    TCGv_ptr r_tsptr;

                                    r_tsptr = tcg_temp_new_ptr();
                                    gen_load_trap_state_at_tl(r_tsptr, tcg_env);
                                    /* tt is 32-bit; use a 32-bit store.  */
                                    tcg_gen_st32_tl(cpu_tmp0, r_tsptr,
                                                    offsetof(trap_state, tt));
                                }
                                break;
                            case 4: // tick
                                {
                                    TCGv_ptr r_tickptr;

                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, tick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_count(r_tickptr,
                                                              cpu_tmp0);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 5: // tba
                                tcg_gen_mov_tl(cpu_tbr, cpu_tmp0);
                                break;
                            case 6: // pstate
                                save_state(dc);
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpstate(tcg_env, cpu_tmp0);
                                /* wrpstate may change execution state; the
                                   next PC must be resolved dynamically.  */
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 7: // tl
                                /* NOTE(review): changing TL changes which
                                   trap_state entry is active, hence the
                                   dynamic npc -- confirm.  */
                                save_state(dc);
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState, tl));
                                dc->npc = DYNAMIC_PC;
                                break;
                            case 8: // pil
                                if (translator_io_start(&dc->base)) {
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                gen_helper_wrpil(tcg_env, cpu_tmp0);
                                break;
                            case 9: // cwp
                                gen_helper_wrcwp(tcg_env, cpu_tmp0);
                                break;
                            /* Window-management registers are plain 32-bit
                               fields in CPUSPARCState.  */
                            case 10: // cansave
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cansave));
                                break;
                            case 11: // canrestore
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         canrestore));
                                break;
                            case 12: // cleanwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         cleanwin));
                                break;
                            case 13: // otherwin
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         otherwin));
                                break;
                            case 14: // wstate
                                tcg_gen_st32_tl(cpu_tmp0, tcg_env,
                                                offsetof(CPUSPARCState,
                                                         wstate));
                                break;
                            case 16: // UA2005 gl
                                CHECK_IU_FEATURE(dc, GL);
                                gen_helper_wrgl(tcg_env, cpu_tmp0);
                                break;
                            case 26: // UA2005 strand status
                                CHECK_IU_FEATURE(dc, HYPV);
                                if (!hypervisor(dc))
                                    goto priv_insn;
                                tcg_gen_mov_tl(cpu_ssr, cpu_tmp0);
                                break;
                            default:
                                goto illegal_insn;
                            }
#else
                            /* Pre-V9: write WIM, masked to the implemented
                               register windows when fewer than 32.  */
                            tcg_gen_trunc_tl_i32(cpu_wim, cpu_tmp0);
                            if (dc->def->nwindows != 32) {
                                tcg_gen_andi_tl(cpu_wim, cpu_wim,
                                                (1 << dc->def->nwindows) - 1);
                            }
#endif
                        }
                        break;
                    case 0x33: /* wrtbr, UA2005 wrhpr */
                        {
#ifndef TARGET_SPARC64
                            if (!supervisor(dc))
                                goto priv_insn;
                            /* WR* writes r[rs1] XOR reg_or_imm (SPARC V8).  */
                            tcg_gen_xor_tl(cpu_tbr, cpu_src1, cpu_src2);
#else
                            CHECK_IU_FEATURE(dc, HYPV);
                            if (!hypervisor(dc))
                                goto priv_insn;
                            cpu_tmp0 = tcg_temp_new();
                            /* wrhpr value is r[rs1] XOR reg_or_imm; rd
                               selects the hyperprivileged register.  */
                            tcg_gen_xor_tl(cpu_tmp0, cpu_src1, cpu_src2);
                            switch (rd) {
                            case 0: // hpstate
                                tcg_gen_st_i64(cpu_tmp0, tcg_env,
                                               offsetof(CPUSPARCState,
                                                        hpstate));
                                /* End the TB so the new hpstate takes
                                   effect before the next instruction.  */
                                save_state(dc);
                                gen_op_next_insn();
                                tcg_gen_exit_tb(NULL, 0);
                                dc->base.is_jmp = DISAS_NORETURN;
                                break;
                            case 1: // htstate
                                // XXX gen_op_wrhtstate();
                                break;
                            case 3: // hintp
                                tcg_gen_mov_tl(cpu_hintp, cpu_tmp0);
                                break;
                            case 5: // htba
                                tcg_gen_mov_tl(cpu_htba, cpu_tmp0);
                                break;
                            case 31: // hstick_cmpr
                                {
                                    TCGv_ptr r_tickptr;

                                    tcg_gen_mov_tl(cpu_hstick_cmpr, cpu_tmp0);
                                    r_tickptr = tcg_temp_new_ptr();
                                    tcg_gen_ld_ptr(r_tickptr, tcg_env,
                                                   offsetof(CPUSPARCState, hstick));
                                    translator_io_start(&dc->base);
                                    gen_helper_tick_set_limit(r_tickptr,
                                                              cpu_hstick_cmpr);
                                    /* End TB to handle timer interrupt */
                                    dc->base.is_jmp = DISAS_EXIT;
                                }
                                break;
                            case 6: // hver readonly
                            default:
                                goto illegal_insn;
                            }
#endif
                        }
                        break;
#endif
#ifdef TARGET_SPARC64
                    case 0x2c: /* V9 movcc */
                        {
                            int cc = GET_FIELD_SP(insn, 11, 12);
                            int cond = GET_FIELD_SP(insn, 14, 17);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Bit 18 set: integer condition codes (cc 0 or
                               2 only); clear: fp condition codes.  */
                            if (insn & (1 << 18)) {
                                if (cc == 0) {
                                    gen_compare(&cmp, 0, cond, dc);
                                } else if (cc == 2) {
                                    gen_compare(&cmp, 1, cond, dc);
                                } else {
                                    goto illegal_insn;
                                }
                            } else {
                                gen_fcompare(&cmp, cc, cond);
                            }

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 11-bit field we have
                               in movcc.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 10);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            /* dst doubles as the move-false value, so rd is
                               left unchanged when the condition fails.  */
                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
                    case 0x2d: /* V9 sdivx */
                        /* Helper needed for the divide-by-zero trap.  */
                        gen_helper_sdivx(cpu_dst, tcg_env, cpu_src1, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2e: /* V9 popc */
                        tcg_gen_ctpop_tl(cpu_dst, cpu_src2);
                        gen_store_gpr(dc, rd, cpu_dst);
                        break;
                    case 0x2f: /* V9 movr */
                        {
                            int cond = GET_FIELD_SP(insn, 10, 12);
                            DisasCompare cmp;
                            TCGv dst;

                            /* Condition is on the rs1 register value.  */
                            gen_compare_reg(&cmp, cond, cpu_src1);

                            /* The get_src2 above loaded the normal 13-bit
                               immediate field, not the 10-bit field we have
                               in movr.  But it did handle the reg case.  */
                            if (IS_IMM) {
                                simm = GET_FIELD_SPs(insn, 0, 9);
                                tcg_gen_movi_tl(cpu_src2, simm);
                            }

                            dst = gen_load_gpr(dc, rd);
                            tcg_gen_movcond_tl(cmp.cond, dst,
                                               cmp.c1, cmp.c2,
                                               cpu_src2, dst);
                            gen_store_gpr(dc, rd, dst);
                            break;
                        }
#endif
                    default:
                        goto illegal_insn;
                    }
                }
            } else if (xop == 0x36) { /* UltraSparc shutdown, VIS, V8 CPop1 */
#ifdef TARGET_SPARC64
                int opf = GET_FIELD_SP(insn, 5, 13);
                rs1 = GET_FIELD(insn, 13, 17);
                rs2 = GET_FIELD(insn, 27, 31);
                if (gen_trap_ifnofpu(dc)) {
                    goto jmp_insn;
                }

                /*
                 * gen_edge trailing arguments: element width (8/16/32),
                 * set-cc flag ("cc" variants) and little-endian flag
                 * ("l"/"ln" variants) -- matching the opcode names below.
                 */
                switch (opf) {
                case 0x000: /* VIS I edge8cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x001: /* VIS II edge8n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x002: /* VIS I edge8lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x003: /* VIS II edge8ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 8, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x004: /* VIS I edge16cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x005: /* VIS II edge16n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x006: /* VIS I edge16lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x007: /* VIS II edge16ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 16, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x008: /* VIS I edge32cc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x009: /* VIS II edge32n */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00a: /* VIS I edge32lcc */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 1, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x00b: /* VIS II edge32ln */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_edge(dc, cpu_dst, cpu_src1, cpu_src2, 32, 0, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x010: /* VIS I array8 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x012: /* VIS I array16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* Same address computation as array8, scaled by the
                       element size (<< 1 for 16-bit elements).  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x014: /* VIS I array32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* As above, << 2 for 32-bit elements.  */
                    gen_helper_array8(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_shli_i64(cpu_dst, cpu_dst, 2);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x018: /* VIS I alignaddr */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 0);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x01a: /* VIS I alignaddrl */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    gen_alignaddr(cpu_dst, cpu_src1, cpu_src2, 1);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x019: /* VIS II bmask */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    cpu_src1 = gen_load_gpr(dc, rs1);
                    cpu_src2 = gen_load_gpr(dc, rs2);
                    /* The sum goes to rd and its low 32 bits are also
                       deposited into the upper half of GSR.  */
                    tcg_gen_add_tl(cpu_dst, cpu_src1, cpu_src2);
                    tcg_gen_deposit_tl(cpu_gsr, cpu_gsr, cpu_dst, 32, 32);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                /* VIS fcmp* compare FP-register operands but deliver the
                   per-element result mask into an integer GPR (rd).  */
                case 0x020: /* VIS I fcmple16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x022: /* VIS I fcmpne16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x024: /* VIS I fcmple32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmple32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x026: /* VIS I fcmpne32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpne32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x028: /* VIS I fcmpgt16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02a: /* VIS I fcmpeq16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq16(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02c: /* VIS I fcmpgt32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpgt32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x02e: /* VIS I fcmpeq32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    cpu_src2_64 = gen_load_fpr_D(dc, rs2);
                    gen_helper_fcmpeq32(cpu_dst, cpu_src1_64, cpu_src2_64);
                    gen_store_gpr(dc, rd, cpu_dst);
                    break;
                case 0x031: /* VIS I fmul8x16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16);
                    break;
                case 0x033: /* VIS I fmul8x16au */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16au);
                    break;
                case 0x035: /* VIS I fmul8x16al */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8x16al);
                    break;
                case 0x036: /* VIS I fmul8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8sux16);
                    break;
                case 0x037: /* VIS I fmul8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmul8ulx16);
                    break;
                case 0x038: /* VIS I fmuld8sux16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8sux16);
                    break;
                case 0x039: /* VIS I fmuld8ulx16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fmuld8ulx16);
                    break;
                case 0x03a: /* VIS I fpack32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* gen_gsr_fop_* variants also pass GSR to the helper.  */
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpack32);
                    break;
                case 0x03b: /* VIS I fpack16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Single-source op: only rs2 is read; GSR controls the
                       packing; result is a 32-bit FP register.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpack16(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03d: /* VIS I fpackfix */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    gen_helper_fpackfix(cpu_dst_32, cpu_gsr, cpu_src1_64);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                case 0x03e: /* VIS I pdist */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* DDDD: rd is both accumulator input and output.  */
                    gen_ne_fop_DDDD(dc, rd, rs1, rs2, gen_helper_pdist);
                    break;
                case 0x048: /* VIS I faligndata */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_faligndata);
                    break;
                case 0x04b: /* VIS I fpmerge */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpmerge);
                    break;
                case 0x04c: /* VIS II bshuffle */
                    CHECK_FPU_FEATURE(dc, VIS2);
                    gen_gsr_fop_DDD(dc, rd, rs1, rs2, gen_helper_bshuffle);
                    break;
                case 0x04d: /* VIS I fexpand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fexpand);
                    break;
                /* Partitioned add/sub: D = 64-bit regs, F = 32-bit regs
                   ("s" suffix).  The 32-bit element forms map directly to
                   plain 32-bit TCG add/sub.  */
                case 0x050: /* VIS I fpadd16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd16);
                    break;
                case 0x051: /* VIS I fpadd16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpadd16s);
                    break;
                case 0x052: /* VIS I fpadd32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpadd32);
                    break;
                case 0x053: /* VIS I fpadd32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_add_i32);
                    break;
                case 0x054: /* VIS I fpsub16 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub16);
                    break;
                case 0x055: /* VIS I fpsub16s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, gen_helper_fpsub16s);
                    break;
                case 0x056: /* VIS I fpsub32 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, gen_helper_fpsub32);
                    break;
                case 0x057: /* VIS I fpsub32s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_sub_i32);
                    break;
                case 0x060: /* VIS I fzero */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* No sources: writes constant zero to the 64-bit reg.  */
                    cpu_dst_64 = gen_dest_fpr_D(dc, rd);
                    tcg_gen_movi_i64(cpu_dst_64, 0);
                    gen_store_fpr_D(dc, rd, cpu_dst_64);
                    break;
                case 0x061: /* VIS I fzeros */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_dst_32 = gen_dest_fpr_F(dc);
                    tcg_gen_movi_i32(cpu_dst_32, 0);
                    gen_store_fpr_F(dc, rd, cpu_dst_32);
                    break;
                /* VIS logical ops map straight onto TCG bitwise ops; the
                   "not1"/"not2" forms pick which source is complemented.  */
                case 0x062: /* VIS I fnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nor_i64);
                    break;
                case 0x063: /* VIS I fnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nor_i32);
                    break;
                case 0x064: /* VIS I fandnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* andc(a, b) == a & ~b, so this is rs1 & ~rs2.  */
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_andc_i64);
                    break;
                case 0x065: /* VIS I fandnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_andc_i32);
                    break;
                case 0x066: /* VIS I fnot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs2, tcg_gen_not_i64);
                    break;
                case 0x067: /* VIS I fnot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs2, tcg_gen_not_i32);
                    break;
                case 0x068: /* VIS I fandnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Sources swapped: andc(rs2, rs1) == ~rs1 & rs2.  */
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_andc_i64);
                    break;
                case 0x069: /* VIS I fandnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_andc_i32);
                    break;
                case 0x06a: /* VIS I fnot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DD(dc, rd, rs1, tcg_gen_not_i64);
                    break;
                case 0x06b: /* VIS I fnot1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FF(dc, rd, rs1, tcg_gen_not_i32);
                    break;
                case 0x06c: /* VIS I fxor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_xor_i64);
                    break;
                case 0x06d: /* VIS I fxors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_xor_i32);
                    break;
                case 0x06e: /* VIS I fnand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_nand_i64);
                    break;
                case 0x06f: /* VIS I fnands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_nand_i32);
                    break;
                case 0x070: /* VIS I fand */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_and_i64);
                    break;
                case 0x071: /* VIS I fands */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_and_i32);
                    break;
                case 0x072: /* VIS I fxnor */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_eqv_i64);
                    break;
                case 0x073: /* VIS I fxnors */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_eqv_i32);
                    break;
                case 0x074: /* VIS I fsrc1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Plain register copy of rs1.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs1);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x075: /* VIS I fsrc1s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs1);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x076: /* VIS I fornot2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_orc_i64);
                    break;
                case 0x077: /* VIS I fornot2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_orc_i32);
                    break;
                case 0x078: /* VIS I fsrc2 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Copy of rs2.  */
                    cpu_src1_64 = gen_load_fpr_D(dc, rs2);
                    gen_store_fpr_D(dc, rd, cpu_src1_64);
                    break;
                case 0x079: /* VIS I fsrc2s */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    cpu_src1_32 = gen_load_fpr_F(dc, rs2);
                    gen_store_fpr_F(dc, rd, cpu_src1_32);
                    break;
                case 0x07a: /* VIS I fornot1 */
                    CHECK_FPU_FEATURE(dc, VIS1);
                    /* Sources swapped: orc(rs2, rs1) == ~rs1 | rs2.  */
                    gen_ne_fop_DDD(dc, rd, rs2, rs1, tcg_gen_orc_i64);
                    break;
4937                 case 0x07b: /* VIS I fornot1s */
4938                     CHECK_FPU_FEATURE(dc, VIS1);
4939                     gen_ne_fop_FFF(dc, rd, rs2, rs1, tcg_gen_orc_i32);
4940                     break;
4941                 case 0x07c: /* VIS I for */
4942                     CHECK_FPU_FEATURE(dc, VIS1);
4943                     gen_ne_fop_DDD(dc, rd, rs1, rs2, tcg_gen_or_i64);
4944                     break;
4945                 case 0x07d: /* VIS I fors */
4946                     CHECK_FPU_FEATURE(dc, VIS1);
4947                     gen_ne_fop_FFF(dc, rd, rs1, rs2, tcg_gen_or_i32);
4948                     break;
4949                 case 0x07e: /* VIS I fone */
4950                     CHECK_FPU_FEATURE(dc, VIS1);
4951                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
4952                     tcg_gen_movi_i64(cpu_dst_64, -1);
4953                     gen_store_fpr_D(dc, rd, cpu_dst_64);
4954                     break;
4955                 case 0x07f: /* VIS I fones */
4956                     CHECK_FPU_FEATURE(dc, VIS1);
4957                     cpu_dst_32 = gen_dest_fpr_F(dc);
4958                     tcg_gen_movi_i32(cpu_dst_32, -1);
4959                     gen_store_fpr_F(dc, rd, cpu_dst_32);
4960                     break;
4961                 case 0x080: /* VIS I shutdown */
4962                 case 0x081: /* VIS II siam */
4963                     // XXX
4964                     goto illegal_insn;
4965                 default:
4966                     goto illegal_insn;
4967                 }
4968 #else
4969                 goto ncp_insn;
4970 #endif
4971             } else if (xop == 0x37) { /* V8 CPop2, V9 impdep2 */
4972 #ifdef TARGET_SPARC64
4973                 goto illegal_insn;
4974 #else
4975                 goto ncp_insn;
4976 #endif
4977 #ifdef TARGET_SPARC64
4978             } else if (xop == 0x39) { /* V9 return */
4979                 save_state(dc);
4980                 cpu_src1 = get_src1(dc, insn);
4981                 cpu_tmp0 = tcg_temp_new();
4982                 if (IS_IMM) {   /* immediate */
4983                     simm = GET_FIELDs(insn, 19, 31);
4984                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
4985                 } else {                /* register */
4986                     rs2 = GET_FIELD(insn, 27, 31);
4987                     if (rs2) {
4988                         cpu_src2 = gen_load_gpr(dc, rs2);
4989                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
4990                     } else {
4991                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
4992                     }
4993                 }
4994                 gen_check_align(dc, cpu_tmp0, 3);
4995                 gen_helper_restore(tcg_env);
4996                 gen_mov_pc_npc(dc);
4997                 tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
4998                 dc->npc = DYNAMIC_PC_LOOKUP;
4999                 goto jmp_insn;
5000 #endif
5001             } else {
5002                 cpu_src1 = get_src1(dc, insn);
5003                 cpu_tmp0 = tcg_temp_new();
5004                 if (IS_IMM) {   /* immediate */
5005                     simm = GET_FIELDs(insn, 19, 31);
5006                     tcg_gen_addi_tl(cpu_tmp0, cpu_src1, simm);
5007                 } else {                /* register */
5008                     rs2 = GET_FIELD(insn, 27, 31);
5009                     if (rs2) {
5010                         cpu_src2 = gen_load_gpr(dc, rs2);
5011                         tcg_gen_add_tl(cpu_tmp0, cpu_src1, cpu_src2);
5012                     } else {
5013                         tcg_gen_mov_tl(cpu_tmp0, cpu_src1);
5014                     }
5015                 }
5016                 switch (xop) {
5017                 case 0x38:      /* jmpl */
5018                     {
5019                         gen_check_align(dc, cpu_tmp0, 3);
5020                         gen_store_gpr(dc, rd, tcg_constant_tl(dc->pc));
5021                         gen_mov_pc_npc(dc);
5022                         gen_address_mask(dc, cpu_tmp0);
5023                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5024                         dc->npc = DYNAMIC_PC_LOOKUP;
5025                     }
5026                     goto jmp_insn;
5027 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5028                 case 0x39:      /* rett, V9 return */
5029                     {
5030                         if (!supervisor(dc))
5031                             goto priv_insn;
5032                         gen_check_align(dc, cpu_tmp0, 3);
5033                         gen_mov_pc_npc(dc);
5034                         tcg_gen_mov_tl(cpu_npc, cpu_tmp0);
5035                         dc->npc = DYNAMIC_PC;
5036                         gen_helper_rett(tcg_env);
5037                     }
5038                     goto jmp_insn;
5039 #endif
5040                 case 0x3b: /* flush */
5041                     /* nop */
5042                     break;
5043                 case 0x3c:      /* save */
5044                     gen_helper_save(tcg_env);
5045                     gen_store_gpr(dc, rd, cpu_tmp0);
5046                     break;
5047                 case 0x3d:      /* restore */
5048                     gen_helper_restore(tcg_env);
5049                     gen_store_gpr(dc, rd, cpu_tmp0);
5050                     break;
5051 #if !defined(CONFIG_USER_ONLY) && defined(TARGET_SPARC64)
5052                 case 0x3e:      /* V9 done/retry */
5053                     {
5054                         switch (rd) {
5055                         case 0:
5056                             if (!supervisor(dc))
5057                                 goto priv_insn;
5058                             dc->npc = DYNAMIC_PC;
5059                             dc->pc = DYNAMIC_PC;
5060                             translator_io_start(&dc->base);
5061                             gen_helper_done(tcg_env);
5062                             goto jmp_insn;
5063                         case 1:
5064                             if (!supervisor(dc))
5065                                 goto priv_insn;
5066                             dc->npc = DYNAMIC_PC;
5067                             dc->pc = DYNAMIC_PC;
5068                             translator_io_start(&dc->base);
5069                             gen_helper_retry(tcg_env);
5070                             goto jmp_insn;
5071                         default:
5072                             goto illegal_insn;
5073                         }
5074                     }
5075                     break;
5076 #endif
5077                 default:
5078                     goto illegal_insn;
5079                 }
5080             }
5081             break;
5082         }
5083         break;
5084     case 3:                     /* load/store instructions */
5085         {
5086             unsigned int xop = GET_FIELD(insn, 7, 12);
5087             /* ??? gen_address_mask prevents us from using a source
5088                register directly.  Always generate a temporary.  */
5089             TCGv cpu_addr = tcg_temp_new();
5090 
5091             tcg_gen_mov_tl(cpu_addr, get_src1(dc, insn));
5092             if (xop == 0x3c || xop == 0x3e) {
5093                 /* V9 casa/casxa : no offset */
5094             } else if (IS_IMM) {     /* immediate */
5095                 simm = GET_FIELDs(insn, 19, 31);
5096                 if (simm != 0) {
5097                     tcg_gen_addi_tl(cpu_addr, cpu_addr, simm);
5098                 }
5099             } else {            /* register */
5100                 rs2 = GET_FIELD(insn, 27, 31);
5101                 if (rs2 != 0) {
5102                     tcg_gen_add_tl(cpu_addr, cpu_addr, gen_load_gpr(dc, rs2));
5103                 }
5104             }
5105             if (xop < 4 || (xop > 7 && xop < 0x14 && xop != 0x0e) ||
5106                 (xop > 0x17 && xop <= 0x1d ) ||
5107                 (xop > 0x2c && xop <= 0x33) || xop == 0x1f || xop == 0x3d) {
5108                 TCGv cpu_val = gen_dest_gpr(dc, rd);
5109 
5110                 switch (xop) {
5111                 case 0x0:       /* ld, V9 lduw, load unsigned word */
5112                     gen_address_mask(dc, cpu_addr);
5113                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5114                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5115                     break;
5116                 case 0x1:       /* ldub, load unsigned byte */
5117                     gen_address_mask(dc, cpu_addr);
5118                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5119                                        dc->mem_idx, MO_UB);
5120                     break;
5121                 case 0x2:       /* lduh, load unsigned halfword */
5122                     gen_address_mask(dc, cpu_addr);
5123                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5124                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5125                     break;
5126                 case 0x3:       /* ldd, load double word */
5127                     if (rd & 1)
5128                         goto illegal_insn;
5129                     else {
5130                         TCGv_i64 t64;
5131 
5132                         gen_address_mask(dc, cpu_addr);
5133                         t64 = tcg_temp_new_i64();
5134                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5135                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5136                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5137                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5138                         gen_store_gpr(dc, rd + 1, cpu_val);
5139                         tcg_gen_shri_i64(t64, t64, 32);
5140                         tcg_gen_trunc_i64_tl(cpu_val, t64);
5141                         tcg_gen_ext32u_tl(cpu_val, cpu_val);
5142                     }
5143                     break;
5144                 case 0x9:       /* ldsb, load signed byte */
5145                     gen_address_mask(dc, cpu_addr);
5146                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr, dc->mem_idx, MO_SB);
5147                     break;
5148                 case 0xa:       /* ldsh, load signed halfword */
5149                     gen_address_mask(dc, cpu_addr);
5150                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5151                                        dc->mem_idx, MO_TESW | MO_ALIGN);
5152                     break;
5153                 case 0xd:       /* ldstub */
5154                     gen_ldstub(dc, cpu_val, cpu_addr, dc->mem_idx);
5155                     break;
5156                 case 0x0f:
5157                     /* swap, swap register with memory. Also atomically */
5158                     cpu_src1 = gen_load_gpr(dc, rd);
5159                     gen_swap(dc, cpu_val, cpu_src1, cpu_addr,
5160                              dc->mem_idx, MO_TEUL);
5161                     break;
5162 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5163                 case 0x10:      /* lda, V9 lduwa, load word alternate */
5164                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5165                     break;
5166                 case 0x11:      /* lduba, load unsigned byte alternate */
5167                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5168                     break;
5169                 case 0x12:      /* lduha, load unsigned halfword alternate */
5170                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5171                     break;
5172                 case 0x13:      /* ldda, load double word alternate */
5173                     if (rd & 1) {
5174                         goto illegal_insn;
5175                     }
5176                     gen_ldda_asi(dc, cpu_addr, insn, rd);
5177                     goto skip_move;
5178                 case 0x19:      /* ldsba, load signed byte alternate */
5179                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_SB);
5180                     break;
5181                 case 0x1a:      /* ldsha, load signed halfword alternate */
5182                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESW);
5183                     break;
5184                 case 0x1d:      /* ldstuba -- XXX: should be atomically */
5185                     gen_ldstub_asi(dc, cpu_val, cpu_addr, insn);
5186                     break;
5187                 case 0x1f:      /* swapa, swap reg with alt. memory. Also
5188                                    atomically */
5189                     cpu_src1 = gen_load_gpr(dc, rd);
5190                     gen_swap_asi(dc, cpu_val, cpu_src1, cpu_addr, insn);
5191                     break;
5192 
5193 #ifndef TARGET_SPARC64
5194                 case 0x30: /* ldc */
5195                 case 0x31: /* ldcsr */
5196                 case 0x33: /* lddc */
5197                     goto ncp_insn;
5198 #endif
5199 #endif
5200 #ifdef TARGET_SPARC64
5201                 case 0x08: /* V9 ldsw */
5202                     gen_address_mask(dc, cpu_addr);
5203                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5204                                        dc->mem_idx, MO_TESL | MO_ALIGN);
5205                     break;
5206                 case 0x0b: /* V9 ldx */
5207                     gen_address_mask(dc, cpu_addr);
5208                     tcg_gen_qemu_ld_tl(cpu_val, cpu_addr,
5209                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5210                     break;
5211                 case 0x18: /* V9 ldswa */
5212                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TESL);
5213                     break;
5214                 case 0x1b: /* V9 ldxa */
5215                     gen_ld_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5216                     break;
5217                 case 0x2d: /* V9 prefetch, no effect */
5218                     goto skip_move;
5219                 case 0x30: /* V9 ldfa */
5220                     if (gen_trap_ifnofpu(dc)) {
5221                         goto jmp_insn;
5222                     }
5223                     gen_ldf_asi(dc, cpu_addr, insn, 4, rd);
5224                     gen_update_fprs_dirty(dc, rd);
5225                     goto skip_move;
5226                 case 0x33: /* V9 lddfa */
5227                     if (gen_trap_ifnofpu(dc)) {
5228                         goto jmp_insn;
5229                     }
5230                     gen_ldf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5231                     gen_update_fprs_dirty(dc, DFPREG(rd));
5232                     goto skip_move;
5233                 case 0x3d: /* V9 prefetcha, no effect */
5234                     goto skip_move;
5235                 case 0x32: /* V9 ldqfa */
5236                     CHECK_FPU_FEATURE(dc, FLOAT128);
5237                     if (gen_trap_ifnofpu(dc)) {
5238                         goto jmp_insn;
5239                     }
5240                     gen_ldf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5241                     gen_update_fprs_dirty(dc, QFPREG(rd));
5242                     goto skip_move;
5243 #endif
5244                 default:
5245                     goto illegal_insn;
5246                 }
5247                 gen_store_gpr(dc, rd, cpu_val);
5248 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5249             skip_move: ;
5250 #endif
5251             } else if (xop >= 0x20 && xop < 0x24) {
5252                 if (gen_trap_ifnofpu(dc)) {
5253                     goto jmp_insn;
5254                 }
5255                 switch (xop) {
5256                 case 0x20:      /* ldf, load fpreg */
5257                     gen_address_mask(dc, cpu_addr);
5258                     cpu_dst_32 = gen_dest_fpr_F(dc);
5259                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5260                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5261                     gen_store_fpr_F(dc, rd, cpu_dst_32);
5262                     break;
5263                 case 0x21:      /* ldfsr, V9 ldxfsr */
5264 #ifdef TARGET_SPARC64
5265                     gen_address_mask(dc, cpu_addr);
5266                     if (rd == 1) {
5267                         TCGv_i64 t64 = tcg_temp_new_i64();
5268                         tcg_gen_qemu_ld_i64(t64, cpu_addr,
5269                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5270                         gen_helper_ldxfsr(cpu_fsr, tcg_env, cpu_fsr, t64);
5271                         break;
5272                     }
5273 #endif
5274                     cpu_dst_32 = tcg_temp_new_i32();
5275                     tcg_gen_qemu_ld_i32(cpu_dst_32, cpu_addr,
5276                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5277                     gen_helper_ldfsr(cpu_fsr, tcg_env, cpu_fsr, cpu_dst_32);
5278                     break;
5279                 case 0x22:      /* ldqf, load quad fpreg */
5280                     CHECK_FPU_FEATURE(dc, FLOAT128);
5281                     gen_address_mask(dc, cpu_addr);
5282                     cpu_src1_64 = tcg_temp_new_i64();
5283                     tcg_gen_qemu_ld_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5284                                         MO_TEUQ | MO_ALIGN_4);
5285                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5286                     cpu_src2_64 = tcg_temp_new_i64();
5287                     tcg_gen_qemu_ld_i64(cpu_src2_64, cpu_addr, dc->mem_idx,
5288                                         MO_TEUQ | MO_ALIGN_4);
5289                     gen_store_fpr_Q(dc, rd, cpu_src1_64, cpu_src2_64);
5290                     break;
5291                 case 0x23:      /* lddf, load double fpreg */
5292                     gen_address_mask(dc, cpu_addr);
5293                     cpu_dst_64 = gen_dest_fpr_D(dc, rd);
5294                     tcg_gen_qemu_ld_i64(cpu_dst_64, cpu_addr, dc->mem_idx,
5295                                         MO_TEUQ | MO_ALIGN_4);
5296                     gen_store_fpr_D(dc, rd, cpu_dst_64);
5297                     break;
5298                 default:
5299                     goto illegal_insn;
5300                 }
5301             } else if (xop < 8 || (xop >= 0x14 && xop < 0x18) ||
5302                        xop == 0xe || xop == 0x1e) {
5303                 TCGv cpu_val = gen_load_gpr(dc, rd);
5304 
5305                 switch (xop) {
5306                 case 0x4: /* st, store word */
5307                     gen_address_mask(dc, cpu_addr);
5308                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5309                                        dc->mem_idx, MO_TEUL | MO_ALIGN);
5310                     break;
5311                 case 0x5: /* stb, store byte */
5312                     gen_address_mask(dc, cpu_addr);
5313                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr, dc->mem_idx, MO_UB);
5314                     break;
5315                 case 0x6: /* sth, store halfword */
5316                     gen_address_mask(dc, cpu_addr);
5317                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5318                                        dc->mem_idx, MO_TEUW | MO_ALIGN);
5319                     break;
5320                 case 0x7: /* std, store double word */
5321                     if (rd & 1)
5322                         goto illegal_insn;
5323                     else {
5324                         TCGv_i64 t64;
5325                         TCGv lo;
5326 
5327                         gen_address_mask(dc, cpu_addr);
5328                         lo = gen_load_gpr(dc, rd + 1);
5329                         t64 = tcg_temp_new_i64();
5330                         tcg_gen_concat_tl_i64(t64, lo, cpu_val);
5331                         tcg_gen_qemu_st_i64(t64, cpu_addr,
5332                                             dc->mem_idx, MO_TEUQ | MO_ALIGN);
5333                     }
5334                     break;
5335 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5336                 case 0x14: /* sta, V9 stwa, store word alternate */
5337                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUL);
5338                     break;
5339                 case 0x15: /* stba, store byte alternate */
5340                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_UB);
5341                     break;
5342                 case 0x16: /* stha, store halfword alternate */
5343                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUW);
5344                     break;
5345                 case 0x17: /* stda, store double word alternate */
5346                     if (rd & 1) {
5347                         goto illegal_insn;
5348                     }
5349                     gen_stda_asi(dc, cpu_val, cpu_addr, insn, rd);
5350                     break;
5351 #endif
5352 #ifdef TARGET_SPARC64
5353                 case 0x0e: /* V9 stx */
5354                     gen_address_mask(dc, cpu_addr);
5355                     tcg_gen_qemu_st_tl(cpu_val, cpu_addr,
5356                                        dc->mem_idx, MO_TEUQ | MO_ALIGN);
5357                     break;
5358                 case 0x1e: /* V9 stxa */
5359                     gen_st_asi(dc, cpu_val, cpu_addr, insn, MO_TEUQ);
5360                     break;
5361 #endif
5362                 default:
5363                     goto illegal_insn;
5364                 }
5365             } else if (xop > 0x23 && xop < 0x28) {
5366                 if (gen_trap_ifnofpu(dc)) {
5367                     goto jmp_insn;
5368                 }
5369                 switch (xop) {
5370                 case 0x24: /* stf, store fpreg */
5371                     gen_address_mask(dc, cpu_addr);
5372                     cpu_src1_32 = gen_load_fpr_F(dc, rd);
5373                     tcg_gen_qemu_st_i32(cpu_src1_32, cpu_addr,
5374                                         dc->mem_idx, MO_TEUL | MO_ALIGN);
5375                     break;
5376                 case 0x25: /* stfsr, V9 stxfsr */
5377                     {
5378 #ifdef TARGET_SPARC64
5379                         gen_address_mask(dc, cpu_addr);
5380                         if (rd == 1) {
5381                             tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5382                                                dc->mem_idx, MO_TEUQ | MO_ALIGN);
5383                             break;
5384                         }
5385 #endif
5386                         tcg_gen_qemu_st_tl(cpu_fsr, cpu_addr,
5387                                            dc->mem_idx, MO_TEUL | MO_ALIGN);
5388                     }
5389                     break;
5390                 case 0x26:
5391 #ifdef TARGET_SPARC64
5392                     /* V9 stqf, store quad fpreg */
5393                     CHECK_FPU_FEATURE(dc, FLOAT128);
5394                     gen_address_mask(dc, cpu_addr);
5395                     /* ??? While stqf only requires 4-byte alignment, it is
5396                        legal for the cpu to signal the unaligned exception.
5397                        The OS trap handler is then required to fix it up.
5398                        For qemu, this avoids having to probe the second page
5399                        before performing the first write.  */
5400                     cpu_src1_64 = gen_load_fpr_Q0(dc, rd);
5401                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5402                                         dc->mem_idx, MO_TEUQ | MO_ALIGN_16);
5403                     tcg_gen_addi_tl(cpu_addr, cpu_addr, 8);
5404                     cpu_src2_64 = gen_load_fpr_Q1(dc, rd);
5405                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr,
5406                                         dc->mem_idx, MO_TEUQ);
5407                     break;
5408 #else /* !TARGET_SPARC64 */
5409                     /* stdfq, store floating point queue */
5410 #if defined(CONFIG_USER_ONLY)
5411                     goto illegal_insn;
5412 #else
5413                     if (!supervisor(dc))
5414                         goto priv_insn;
5415                     if (gen_trap_ifnofpu(dc)) {
5416                         goto jmp_insn;
5417                     }
5418                     goto nfq_insn;
5419 #endif
5420 #endif
5421                 case 0x27: /* stdf, store double fpreg */
5422                     gen_address_mask(dc, cpu_addr);
5423                     cpu_src1_64 = gen_load_fpr_D(dc, rd);
5424                     tcg_gen_qemu_st_i64(cpu_src1_64, cpu_addr, dc->mem_idx,
5425                                         MO_TEUQ | MO_ALIGN_4);
5426                     break;
5427                 default:
5428                     goto illegal_insn;
5429                 }
5430             } else if (xop > 0x33 && xop < 0x3f) {
5431                 switch (xop) {
5432 #ifdef TARGET_SPARC64
5433                 case 0x34: /* V9 stfa */
5434                     if (gen_trap_ifnofpu(dc)) {
5435                         goto jmp_insn;
5436                     }
5437                     gen_stf_asi(dc, cpu_addr, insn, 4, rd);
5438                     break;
5439                 case 0x36: /* V9 stqfa */
5440                     {
5441                         CHECK_FPU_FEATURE(dc, FLOAT128);
5442                         if (gen_trap_ifnofpu(dc)) {
5443                             goto jmp_insn;
5444                         }
5445                         gen_stf_asi(dc, cpu_addr, insn, 16, QFPREG(rd));
5446                     }
5447                     break;
5448                 case 0x37: /* V9 stdfa */
5449                     if (gen_trap_ifnofpu(dc)) {
5450                         goto jmp_insn;
5451                     }
5452                     gen_stf_asi(dc, cpu_addr, insn, 8, DFPREG(rd));
5453                     break;
5454                 case 0x3e: /* V9 casxa */
5455                     rs2 = GET_FIELD(insn, 27, 31);
5456                     cpu_src2 = gen_load_gpr(dc, rs2);
5457                     gen_casx_asi(dc, cpu_addr, cpu_src2, insn, rd);
5458                     break;
5459 #else
5460                 case 0x34: /* stc */
5461                 case 0x35: /* stcsr */
5462                 case 0x36: /* stdcq */
5463                 case 0x37: /* stdc */
5464                     goto ncp_insn;
5465 #endif
5466 #if !defined(CONFIG_USER_ONLY) || defined(TARGET_SPARC64)
5467                 case 0x3c: /* V9 or LEON3 casa */
5468 #ifndef TARGET_SPARC64
5469                     CHECK_IU_FEATURE(dc, CASA);
5470 #endif
5471                     rs2 = GET_FIELD(insn, 27, 31);
5472                     cpu_src2 = gen_load_gpr(dc, rs2);
5473                     gen_cas_asi(dc, cpu_addr, cpu_src2, insn, rd);
5474                     break;
5475 #endif
5476                 default:
5477                     goto illegal_insn;
5478                 }
5479             } else {
5480                 goto illegal_insn;
5481             }
5482         }
5483         break;
5484     }
5485     advance_pc(dc);
5486  jmp_insn:
5487     return;
5488  illegal_insn:
5489     gen_exception(dc, TT_ILL_INSN);
5490     return;
5491 #if !defined(CONFIG_USER_ONLY)
5492  priv_insn:
5493     gen_exception(dc, TT_PRIV_INSN);
5494     return;
5495 #endif
5496  nfpu_insn:
5497     gen_op_fpexception_im(dc, FSR_FTT_UNIMPFPOP);
5498     return;
5499 #if !defined(CONFIG_USER_ONLY) && !defined(TARGET_SPARC64)
5500  nfq_insn:
5501     gen_op_fpexception_im(dc, FSR_FTT_SEQ_ERROR);
5502     return;
5503 #endif
5504 #ifndef TARGET_SPARC64
5505  ncp_insn:
5506     gen_exception(dc, TT_NCP_INSN);
5507     return;
5508 #endif
5509 }
5510 
/*
 * TranslatorOps.init_disas_context hook: seed the DisasContext for a new
 * translation block from the TB's pc, cs_base, and flag bits.
 */
static void sparc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    CPUSPARCState *env = cpu_env(cs);
    int bound;

    dc->pc = dc->base.pc_first;
    /* The next-PC (delay-slot successor) is carried in the TB's cs_base. */
    dc->npc = (target_ulong)dc->base.tb->cs_base;
    /* Condition-code state is not statically known at TB entry. */
    dc->cc_op = CC_OP_DYNAMIC;
    /* Low flag bits select the MMU index used for memory accesses. */
    dc->mem_idx = dc->base.tb->flags & TB_FLAG_MMU_MASK;
    dc->def = &env->def;
    dc->fpu_enabled = tb_fpu_enabled(dc->base.tb->flags);
    dc->address_mask_32bit = tb_am_enabled(dc->base.tb->flags);
#ifndef CONFIG_USER_ONLY
    dc->supervisor = (dc->base.tb->flags & TB_FLAG_SUPER) != 0;
#endif
#ifdef TARGET_SPARC64
    dc->fprs_dirty = 0;
    /* Default ASI for alternate-space accesses, packed into the flags. */
    dc->asi = (dc->base.tb->flags >> TB_FLAG_ASI_SHIFT) & 0xff;
#ifndef CONFIG_USER_ONLY
    dc->hypervisor = (dc->base.tb->flags & TB_FLAG_HYPER) != 0;
#endif
#endif
    /*
     * if we reach a page boundary, we stop generation so that the
     * PC of a TT_TFAULT exception is always in the right page
     */
    /* -(pc | TARGET_PAGE_MASK) is the byte distance to the end of the
       page; dividing by 4 gives the remaining 4-byte insn slots. */
    bound = -(dc->base.pc_first | TARGET_PAGE_MASK) / 4;
    dc->base.max_insns = MIN(dc->base.max_insns, bound);
}
5541 
/* TranslatorOps.tb_start hook: SPARC needs no per-TB setup here. */
static void sparc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
5545 
5546 static void sparc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
5547 {
5548     DisasContext *dc = container_of(dcbase, DisasContext, base);
5549     target_ulong npc = dc->npc;
5550 
5551     if (npc & 3) {
5552         switch (npc) {
5553         case JUMP_PC:
5554             assert(dc->jump_pc[1] == dc->pc + 4);
5555             npc = dc->jump_pc[0] | JUMP_PC;
5556             break;
5557         case DYNAMIC_PC:
5558         case DYNAMIC_PC_LOOKUP:
5559             npc = DYNAMIC_PC;
5560             break;
5561         default:
5562             g_assert_not_reached();
5563         }
5564     }
5565     tcg_gen_insn_start(dc->pc, npc);
5566 }
5567 
5568 static void sparc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
5569 {
5570     DisasContext *dc = container_of(dcbase, DisasContext, base);
5571     CPUSPARCState *env = cpu_env(cs);
5572     unsigned int insn;
5573 
5574     insn = translator_ldl(env, &dc->base, dc->pc);
5575     dc->base.pc_next += 4;
5576 
5577     if (!decode(dc, insn)) {
5578         disas_sparc_legacy(dc, insn);
5579     }
5580 
5581     if (dc->base.is_jmp == DISAS_NORETURN) {
5582         return;
5583     }
5584     if (dc->pc != dc->base.pc_next) {
5585         dc->base.is_jmp = DISAS_TOO_MANY;
5586     }
5587 }
5588 
/*
 * Finish the TB: emit the exit sequence appropriate to how translation
 * stopped, then emit the out-of-line code for any exceptions queued to
 * be raised from delay slots (dc->delay_excp_list).
 */
static void sparc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *dc = container_of(dcbase, DisasContext, base);
    DisasDelayException *e, *e_next;
    bool may_lookup;

    switch (dc->base.is_jmp) {
    case DISAS_NEXT:
    case DISAS_TOO_MANY:
        if (((dc->pc | dc->npc) & 3) == 0) {
            /* static PC and NPC: we can use direct chaining */
            gen_goto_tb(dc, 0, dc->pc, dc->npc);
            break;
        }

        /*
         * At least one of pc/npc is dynamic (low bits carry a marker).
         * Decide whether the next TB may be found via in-TCG lookup
         * (DYNAMIC_PC_LOOKUP) or we must return to the main loop
         * (DYNAMIC_PC).
         */
        may_lookup = true;
        if (dc->pc & 3) {
            switch (dc->pc) {
            case DYNAMIC_PC_LOOKUP:
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            /* Static pc: materialize it; dynamic values are already in cpu_pc. */
            tcg_gen_movi_tl(cpu_pc, dc->pc);
        }

        if (dc->npc & 3) {
            switch (dc->npc) {
            case JUMP_PC:
                /* npc depends on a condition; emit the select of the targets. */
                gen_generic_branch(dc);
                break;
            case DYNAMIC_PC:
                may_lookup = false;
                break;
            case DYNAMIC_PC_LOOKUP:
                break;
            default:
                g_assert_not_reached();
            }
        } else {
            tcg_gen_movi_tl(cpu_npc, dc->npc);
        }
        if (may_lookup) {
            tcg_gen_lookup_and_goto_ptr();
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;

    case DISAS_NORETURN:
       break;

    case DISAS_EXIT:
        /* Exit TB */
        save_state(dc);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }

    /* Emit, then free, each queued delay-slot exception stub. */
    for (e = dc->delay_excp_list; e ; e = e_next) {
        gen_set_label(e->lab);

        tcg_gen_movi_tl(cpu_pc, e->pc);
        /* Aligned npc is a known static value; otherwise cpu_npc is current. */
        if (e->npc % 4 == 0) {
            tcg_gen_movi_tl(cpu_npc, e->npc);
        }
        gen_helper_raise_exception(tcg_env, e->excp);

        e_next = e->next;
        g_free(e);
    }
}
5668 
5669 static void sparc_tr_disas_log(const DisasContextBase *dcbase,
5670                                CPUState *cpu, FILE *logfile)
5671 {
5672     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
5673     target_disas(logfile, cpu, dcbase->pc_first, dcbase->tb->size);
5674 }
5675 
/* Hook table driving translator_loop() for the SPARC front end. */
static const TranslatorOps sparc_tr_ops = {
    .init_disas_context = sparc_tr_init_disas_context,
    .tb_start           = sparc_tr_tb_start,
    .insn_start         = sparc_tr_insn_start,
    .translate_insn     = sparc_tr_translate_insn,
    .tb_stop            = sparc_tr_tb_stop,
    .disas_log          = sparc_tr_disas_log,
};
5684 
5685 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
5686                            target_ulong pc, void *host_pc)
5687 {
5688     DisasContext dc = {};
5689 
5690     translator_loop(cs, tb, max_insns, pc, host_pc, &sparc_tr_ops, &dc.base);
5691 }
5692 
/*
 * Allocate the TCG globals that mirror CPUSPARCState fields.
 * Run once at startup, before any code is translated.
 */
void sparc_tcg_init(void)
{
    static const char gregnames[32][4] = {
        "g0", "g1", "g2", "g3", "g4", "g5", "g6", "g7",
        "o0", "o1", "o2", "o3", "o4", "o5", "o6", "o7",
        "l0", "l1", "l2", "l3", "l4", "l5", "l6", "l7",
        "i0", "i1", "i2", "i3", "i4", "i5", "i6", "i7",
    };
    /* Even-numbered names only: fpr[] holds 64-bit register pairs. */
    static const char fregnames[32][4] = {
        "f0", "f2", "f4", "f6", "f8", "f10", "f12", "f14",
        "f16", "f18", "f20", "f22", "f24", "f26", "f28", "f30",
        "f32", "f34", "f36", "f38", "f40", "f42", "f44", "f46",
        "f48", "f50", "f52", "f54", "f56", "f58", "f60", "f62",
    };

    /* 32-bit globals, each backed by an env field at the given offset. */
    static const struct { TCGv_i32 *ptr; int off; const char *name; } r32[] = {
#ifdef TARGET_SPARC64
        { &cpu_xcc, offsetof(CPUSPARCState, xcc), "xcc" },
        { &cpu_fprs, offsetof(CPUSPARCState, fprs), "fprs" },
#else
        { &cpu_wim, offsetof(CPUSPARCState, wim), "wim" },
#endif
        { &cpu_cc_op, offsetof(CPUSPARCState, cc_op), "cc_op" },
        { &cpu_psr, offsetof(CPUSPARCState, psr), "psr" },
    };

    /* target_ulong-sized globals, likewise backed by env fields. */
    static const struct { TCGv *ptr; int off; const char *name; } rtl[] = {
#ifdef TARGET_SPARC64
        { &cpu_gsr, offsetof(CPUSPARCState, gsr), "gsr" },
        { &cpu_tick_cmpr, offsetof(CPUSPARCState, tick_cmpr), "tick_cmpr" },
        { &cpu_stick_cmpr, offsetof(CPUSPARCState, stick_cmpr), "stick_cmpr" },
        { &cpu_hstick_cmpr, offsetof(CPUSPARCState, hstick_cmpr),
          "hstick_cmpr" },
        { &cpu_hintp, offsetof(CPUSPARCState, hintp), "hintp" },
        { &cpu_htba, offsetof(CPUSPARCState, htba), "htba" },
        { &cpu_hver, offsetof(CPUSPARCState, hver), "hver" },
        { &cpu_ssr, offsetof(CPUSPARCState, ssr), "ssr" },
        { &cpu_ver, offsetof(CPUSPARCState, version), "ver" },
#endif
        { &cpu_cond, offsetof(CPUSPARCState, cond), "cond" },
        { &cpu_cc_src, offsetof(CPUSPARCState, cc_src), "cc_src" },
        { &cpu_cc_src2, offsetof(CPUSPARCState, cc_src2), "cc_src2" },
        { &cpu_cc_dst, offsetof(CPUSPARCState, cc_dst), "cc_dst" },
        { &cpu_fsr, offsetof(CPUSPARCState, fsr), "fsr" },
        { &cpu_pc, offsetof(CPUSPARCState, pc), "pc" },
        { &cpu_npc, offsetof(CPUSPARCState, npc), "npc" },
        { &cpu_y, offsetof(CPUSPARCState, y), "y" },
#ifndef CONFIG_USER_ONLY
        { &cpu_tbr, offsetof(CPUSPARCState, tbr), "tbr" },
#endif
    };

    unsigned int i;

    /* Pointer to the current register window; backs regs %o0..%i7 below. */
    cpu_regwptr = tcg_global_mem_new_ptr(tcg_env,
                                         offsetof(CPUSPARCState, regwptr),
                                         "regwptr");

    for (i = 0; i < ARRAY_SIZE(r32); ++i) {
        *r32[i].ptr = tcg_global_mem_new_i32(tcg_env, r32[i].off, r32[i].name);
    }

    for (i = 0; i < ARRAY_SIZE(rtl); ++i) {
        *rtl[i].ptr = tcg_global_mem_new(tcg_env, rtl[i].off, rtl[i].name);
    }

    /* %g0 gets no TCG global; users of cpu_regs special-case NULL. */
    cpu_regs[0] = NULL;
    /* %g1..%g7 live directly in env->gregs[]. */
    for (i = 1; i < 8; ++i) {
        cpu_regs[i] = tcg_global_mem_new(tcg_env,
                                         offsetof(CPUSPARCState, gregs[i]),
                                         gregnames[i]);
    }

    /* Windowed registers are addressed relative to cpu_regwptr. */
    for (i = 8; i < 32; ++i) {
        cpu_regs[i] = tcg_global_mem_new(cpu_regwptr,
                                         (i - 8) * sizeof(target_ulong),
                                         gregnames[i]);
    }

    /* Floating-point registers, as 64-bit pairs. */
    for (i = 0; i < TARGET_DPREGS; i++) {
        cpu_fpr[i] = tcg_global_mem_new_i64(tcg_env,
                                            offsetof(CPUSPARCState, fpr[i]),
                                            fregnames[i]);
    }
}
5778 
5779 void sparc_restore_state_to_opc(CPUState *cs,
5780                                 const TranslationBlock *tb,
5781                                 const uint64_t *data)
5782 {
5783     SPARCCPU *cpu = SPARC_CPU(cs);
5784     CPUSPARCState *env = &cpu->env;
5785     target_ulong pc = data[0];
5786     target_ulong npc = data[1];
5787 
5788     env->pc = pc;
5789     if (npc == DYNAMIC_PC) {
5790         /* dynamic NPC: already stored */
5791     } else if (npc & JUMP_PC) {
5792         /* jump PC: use 'cond' and the jump targets of the translation */
5793         if (env->cond) {
5794             env->npc = npc & ~3;
5795         } else {
5796             env->npc = pc + 4;
5797         }
5798     } else {
5799         env->npc = npc;
5800     }
5801 }
5802