xref: /openbmc/qemu/target/alpha/translate.c (revision 8417e137)
1 /*
2  *  Alpha emulation cpu translation for qemu.
3  *
4  *  Copyright (c) 2007 Jocelyn Mayer
5  *
6  * This library is free software; you can redistribute it and/or
7  * modify it under the terms of the GNU Lesser General Public
8  * License as published by the Free Software Foundation; either
9  * version 2.1 of the License, or (at your option) any later version.
10  *
11  * This library is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
14  * Lesser General Public License for more details.
15  *
16  * You should have received a copy of the GNU Lesser General Public
17  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
18  */
19 
20 #include "qemu/osdep.h"
21 #include "cpu.h"
22 #include "sysemu/cpus.h"
23 #include "sysemu/cpu-timers.h"
24 #include "disas/disas.h"
25 #include "qemu/host-utils.h"
26 #include "exec/exec-all.h"
27 #include "tcg/tcg-op.h"
28 #include "exec/cpu_ldst.h"
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
31 #include "exec/translator.h"
32 #include "exec/log.h"
33 
34 
35 #undef ALPHA_DEBUG_DISAS
36 #define CONFIG_SOFTFLOAT_INLINE
37 
38 #ifdef ALPHA_DEBUG_DISAS
39 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
40 #else
41 #  define LOG_DISAS(...) do { } while (0)
42 #endif
43 
44 typedef struct DisasContext DisasContext;
45 struct DisasContext {
46     DisasContextBase base;
47 
48 #ifndef CONFIG_USER_ONLY
49     uint64_t palbr;
50 #endif
51     uint32_t tbflags;
52     int mem_idx;
53 
54     /* implver and amask values for this CPU.  */
55     int implver;
56     int amask;
57 
58     /* Current rounding mode for this TB.  */
59     int tb_rm;
60     /* Current flush-to-zero setting for this TB.  */
61     int tb_ftz;
62 
63     /* The set of registers active in the current context.  */
64     TCGv *ir;
65 
66     /* Temporaries for $31 and $f31 as source and destination.  */
67     TCGv zero;
68     TCGv sink;
69     /* Temporary for immediate constants.  */
70     TCGv lit;
71 };
72 
73 /* Target-specific return values from translate_one, indicating the
74    state of the TB.  Note that DISAS_NEXT indicates that we are not
75    exiting the TB.  */
76 #define DISAS_PC_UPDATED_NOCHAIN  DISAS_TARGET_0
77 #define DISAS_PC_UPDATED          DISAS_TARGET_1
78 #define DISAS_PC_STALE            DISAS_TARGET_2
79 
80 /* global register indexes */
81 static TCGv cpu_std_ir[31];
82 static TCGv cpu_fir[31];
83 static TCGv cpu_pc;
84 static TCGv cpu_lock_addr;
85 static TCGv cpu_lock_value;
86 
87 #ifndef CONFIG_USER_ONLY
88 static TCGv cpu_pal_ir[31];
89 #endif
90 
91 #include "exec/gen-icount.h"
92 
/* One-time initialization: register every CPUAlphaState field that the
   translator models as a TCG global (integer regs, FP regs, PAL shadow
   regs, pc and the lock_addr/lock_value pair used by LL/SC).  */
void alpha_translate_init(void)
{
#define DEF_VAR(V)  { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    int i;

    /* $0..$30 as seen in normal (non-PALmode) execution.  */
    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    /* The PALmode register file is the standard one, except that eight
       registers are replaced by shadow copies: $8..$14 plus $25
       (i == 7 maps to r25, the others to r8+i).  */
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}
156 
157 static TCGv load_zero(DisasContext *ctx)
158 {
159     if (!ctx->zero) {
160         ctx->zero = tcg_const_i64(0);
161     }
162     return ctx->zero;
163 }
164 
165 static TCGv dest_sink(DisasContext *ctx)
166 {
167     if (!ctx->sink) {
168         ctx->sink = tcg_temp_new();
169     }
170     return ctx->sink;
171 }
172 
173 static void free_context_temps(DisasContext *ctx)
174 {
175     if (ctx->sink) {
176         tcg_gen_discard_i64(ctx->sink);
177         tcg_temp_free(ctx->sink);
178         ctx->sink = NULL;
179     }
180     if (ctx->zero) {
181         tcg_temp_free(ctx->zero);
182         ctx->zero = NULL;
183     }
184     if (ctx->lit) {
185         tcg_temp_free(ctx->lit);
186         ctx->lit = NULL;
187     }
188 }
189 
190 static TCGv load_gpr(DisasContext *ctx, unsigned reg)
191 {
192     if (likely(reg < 31)) {
193         return ctx->ir[reg];
194     } else {
195         return load_zero(ctx);
196     }
197 }
198 
199 static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
200                          uint8_t lit, bool islit)
201 {
202     if (islit) {
203         ctx->lit = tcg_const_i64(lit);
204         return ctx->lit;
205     } else if (likely(reg < 31)) {
206         return ctx->ir[reg];
207     } else {
208         return load_zero(ctx);
209     }
210 }
211 
212 static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
213 {
214     if (likely(reg < 31)) {
215         return ctx->ir[reg];
216     } else {
217         return dest_sink(ctx);
218     }
219 }
220 
221 static TCGv load_fpr(DisasContext *ctx, unsigned reg)
222 {
223     if (likely(reg < 31)) {
224         return cpu_fir[reg];
225     } else {
226         return load_zero(ctx);
227     }
228 }
229 
230 static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
231 {
232     if (likely(reg < 31)) {
233         return cpu_fir[reg];
234     } else {
235         return dest_sink(ctx);
236     }
237 }
238 
/* Byte offset within CPUAlphaState of the flag byte that holds the bit
   at position SHIFT of env->flags, accounting for host endianness so a
   single-byte load/store touches the right part of the 32-bit word.  */
static int get_flag_ofs(unsigned shift)
{
    int ofs = offsetof(CPUAlphaState, flags);
#ifdef HOST_WORDS_BIGENDIAN
    ofs += 3 - (shift / 8);
#else
    ofs += shift / 8;
#endif
    return ofs;
}
249 
/* Load (zero-extended) the flag byte selected by SHIFT into VAL.  */
static void ld_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_ld8u_i64(val, cpu_env, get_flag_ofs(shift));
}
254 
/* Store the low byte of VAL into the flag byte selected by SHIFT.  */
static void st_flag_byte(TCGv val, unsigned shift)
{
    tcg_gen_st8_i64(val, cpu_env, get_flag_ofs(shift));
}
259 
260 static void gen_excp_1(int exception, int error_code)
261 {
262     TCGv_i32 tmp1, tmp2;
263 
264     tmp1 = tcg_const_i32(exception);
265     tmp2 = tcg_const_i32(error_code);
266     gen_helper_excp(cpu_env, tmp1, tmp2);
267     tcg_temp_free_i32(tmp2);
268     tcg_temp_free_i32(tmp1);
269 }
270 
/* Raise EXCEPTION at the current instruction: synchronize cpu_pc with
   the translator's pc_next, then call the helper.  Always ends the TB.  */
static DisasJumpType gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
    gen_excp_1(exception, error_code);
    return DISAS_NORETURN;
}
277 
/* Raise an illegal-opcode (OPCDEC) exception for the current insn.  */
static inline DisasJumpType gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}
282 
/* LDF: load a 32-bit VAX F-float and expand it to register format.  */
static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}
290 
/* LDG: load a 64-bit VAX G-float and convert the memory word ordering
   to register format.  */
static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}
298 
/* LDS: load a 32-bit IEEE single and expand it to register format.  */
static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}
306 
/* LDL_L: load-locked longword.  Record the address and loaded value so
   a later STx_C can be implemented as a compare-and-swap.  */
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
313 
/* LDQ_L: load-locked quadword; see gen_qemu_ldl_l.  */
static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}
320 
/* Common skeleton for the load instructions: compute the effective
   address rb+disp16 (optionally clearing the low 3 bits for the
   unaligned LDQ_U form) and invoke the per-width load generator.
   FP selects the floating-point register file for the destination.  */
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    /* Both adjustments accumulate into the same temp; after the first
       one, ADDR aliases TMP so the second operates in place.  */
    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    /* ra != 31 here, so indexing the register arrays directly is safe.  */
    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
353 
/* STF: pack a register-format VAX F-float back to its 32-bit memory
   representation and store it.  */
static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}
361 
/* STG: pack a register-format VAX G-float to memory word order and
   store the 64-bit result.  */
static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
    tcg_temp_free(tmp);
}
369 
/* STS: pack a register-format IEEE single back to 32 bits and store.  */
static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}
377 
/* Common skeleton for the store instructions: mirror of gen_load_mem,
   but with no $31 short-circuit since a store of zero still must
   happen ($31/$f31 read as zero via load_gpr/load_fpr).  */
static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    /* CLEAR implements the STQ_U low-bits masking.  */
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}
403 
/* STL_C/STQ_C: store-conditional.  Implemented as an atomic cmpxchg
   against the value remembered by the matching load-locked: if the
   effective address still equals cpu_lock_addr and memory still holds
   cpu_lock_value, the store happens.  RA receives 1 on success, 0 on
   failure, and the lock is always invalidated afterward.  */
static DisasJumpType gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                           int32_t disp16, int mem_idx,
                                           MemOp op)
{
    TCGLabel *lab_fail, *lab_done;
    TCGv addr, val;

    addr = tcg_temp_new_i64();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    /* Release per-insn temps before branching; temps must not live
       across brcond in TCG.  */
    free_context_temps(ctx);

    lab_fail = gen_new_label();
    lab_done = gen_new_label();
    /* Address mismatch with the lock -> immediate failure.  */
    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);
    tcg_temp_free_i64(addr);

    val = tcg_temp_new_i64();
    tcg_gen_atomic_cmpxchg_i64(val, cpu_lock_addr, cpu_lock_value,
                               load_gpr(ctx, ra), mem_idx, op);
    free_context_temps(ctx);

    /* VAL holds the old memory contents; equality with the locked
       value means the exchange (and thus the store) succeeded.  */
    if (ra != 31) {
        tcg_gen_setcond_i64(TCG_COND_EQ, ctx->ir[ra], val, cpu_lock_value);
    }
    tcg_temp_free_i64(val);
    tcg_gen_br(lab_done);

    gen_set_label(lab_fail);
    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], 0);
    }

    gen_set_label(lab_done);
    /* Invalidate the lock regardless of outcome.  */
    tcg_gen_movi_i64(cpu_lock_addr, -1);
    return DISAS_NEXT;
}
440 
/* True if a direct (chained) jump to DEST may be emitted from this TB.  */
static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}
445 
446 static DisasJumpType gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
447 {
448     uint64_t dest = ctx->base.pc_next + (disp << 2);
449 
450     if (ra != 31) {
451         tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
452     }
453 
454     /* Notice branch-to-next; used to initialize RA with the PC.  */
455     if (disp == 0) {
456         return 0;
457     } else if (use_goto_tb(ctx, dest)) {
458         tcg_gen_goto_tb(0);
459         tcg_gen_movi_i64(cpu_pc, dest);
460         tcg_gen_exit_tb(ctx->base.tb, 0);
461         return DISAS_NORETURN;
462     } else {
463         tcg_gen_movi_i64(cpu_pc, dest);
464         return DISAS_PC_UPDATED;
465     }
466 }
467 
/* Conditional branch on (CMP cond 0) to pc_next + 4*disp.  When both
   edges can be chained, emit a goto_tb pair; otherwise select the new
   pc with a movcond and let the main loop look it up.  */
static DisasJumpType gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                        TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->base.pc_next + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        /* Fall-through edge: continue at the next instruction.  */
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        tcg_gen_exit_tb(ctx->base.tb, 0);

        /* Taken edge: jump to the branch target.  */
        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb(ctx->base.tb, 1);

        return DISAS_NORETURN;
    } else {
        TCGv_i64 z = tcg_const_i64(0);
        TCGv_i64 d = tcg_const_i64(dest);
        TCGv_i64 p = tcg_const_i64(ctx->base.pc_next);

        /* cpu_pc = (cmp cond 0) ? dest : pc_next.  */
        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);

        tcg_temp_free_i64(z);
        tcg_temp_free_i64(d);
        tcg_temp_free_i64(p);
        return DISAS_PC_UPDATED;
    }
}
500 
501 static DisasJumpType gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
502                                int32_t disp, int mask)
503 {
504     if (mask) {
505         TCGv tmp = tcg_temp_new();
506         DisasJumpType ret;
507 
508         tcg_gen_andi_i64(tmp, load_gpr(ctx, ra), 1);
509         ret = gen_bcond_internal(ctx, cond, tmp, disp);
510         tcg_temp_free(tmp);
511         return ret;
512     }
513     return gen_bcond_internal(ctx, cond, load_gpr(ctx, ra), disp);
514 }
515 
516 /* Fold -0.0 for comparison with COND.  */
517 
/* Canonicalize SRC so that a signed-integer comparison against zero
   with COND gives the IEEE result even for -0.0 (whose integer pattern
   is 1<<63, i.e. negative).  */
static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.
           dest = (src != mzero) ? -1 : 0; dest &= src — i.e. only the
           exact -0.0 pattern is squashed to zero.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        /* No other conditions are used by the FP branch/cmov insns.  */
        abort();
    }
}
547 
548 static DisasJumpType gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
549                                 int32_t disp)
550 {
551     TCGv cmp_tmp = tcg_temp_new();
552     DisasJumpType ret;
553 
554     gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
555     ret = gen_bcond_internal(ctx, cond, cmp_tmp, disp);
556     tcg_temp_free(cmp_tmp);
557     return ret;
558 }
559 
/* FCMOVxx: if (RA cond 0.0) then RC = RB, with -0.0 folded so the
   integer movcond matches IEEE semantics.  */
static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    /* rc = (va cond 0) ? vb : rc — i.e. leave RC unchanged when false.  */
    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}
573 
/* Qualifier bits encoded in the fn11 field of FP operate instructions.  */
#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */
584 
/* Emit code to switch the softfloat rounding mode to the one requested
   by FN11, caching the last value in the DisasContext so consecutive
   insns with the same qualifier emit nothing.  */
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    /* The two-bit field is exhaustive after masking; no default needed.  */
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        /* Dynamic rounding: take the mode from the FPCR at run time.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "fpu/softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
624 
/* Emit code to set the softfloat flush-to-zero flag as implied by the
   underflow qualifier in FN11, with the same per-TB caching scheme as
   gen_qual_roundmode.  */
static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}
654 
/* Fetch FP source register REG for an IEEE operation, emitting the
   input-validation helper appropriate to the /S qualifier.  IS_CMP
   selects the relaxed checking used by the compare instructions.  */
static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            /* No software completion: trap on non-finite inputs now.  */
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal as per spec.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}
680 
/* After an IEEE operation targeting register RC, raise any accumulated
   FP exceptions that the qualifiers in FN11 do not ask us to ignore.  */
static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}
715 
/* CVTLQ: convert the longword stored in FP-register format (bits
   spread over <63:62,58:29>) back into a sign-extended 64-bit value.  */
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_shri_i64(tmp, vb, 29);
    tcg_gen_sari_i64(vc, vb, 32);
    tcg_gen_deposit_i64(vc, vc, tmp, 0, 30);

    tcg_temp_free(tmp);
}
728 
/* Common expansion for unary IEEE operations: set rounding and
   flush-to-zero per the qualifiers, validate the input, call the
   helper, and raise any enabled exceptions.  */
static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}
743 
/* Instantiate gen_<name> wrappers around gen_ieee_arith2 for the
   unary IEEE insns (square roots and float-to-float conversions).  */
#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)
754 
/* CVTTQ: T-float to quadword integer conversion.  */
static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    /* No need to set flushzero, since we have an integer output.  */
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}
773 
774 static void gen_ieee_intcvt(DisasContext *ctx,
775                             void (*helper)(TCGv, TCGv_ptr, TCGv),
776                             int rb, int rc, int fn11)
777 {
778     TCGv vb, vc;
779 
780     gen_qual_roundmode(ctx, fn11);
781     vb = load_fpr(ctx, rb);
782     vc = dest_fpr(ctx, rc);
783 
784     /* The only exception that can be raised by integer conversion
785        is inexact.  Thus we only need to worry about exceptions when
786        inexact handling is requested.  */
787     if (fn11 & QUAL_I) {
788         helper(vc, cpu_env, vb);
789         gen_fp_exc_raise(rc, fn11);
790     } else {
791         helper(vc, cpu_env, vb);
792     }
793 }
794 
/* Instantiate gen_<name> wrappers around gen_ieee_intcvt for the
   quadword-to-float conversion insns.  */
#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)
803 
/* CPYS/CPYSN/CPYSE building block:
   vc = (vb & ~mask) | ((inv_a ? ~va : va) & mask) — merge the MASK
   bits of VA (optionally inverted, for sign-negate) into VB.  */
static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}
821 
/* Common expansion for binary IEEE arithmetic: qualifiers, input
   validation for both sources, helper call, exception raise.  */
static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
838 
/* Instantiate gen_<name> wrappers around gen_ieee_arith3 for the
   binary IEEE add/sub/mul/div insns in both S and T formats.  */
#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)
853 
/* Common expansion for IEEE comparisons.  No rounding/flush setup is
   needed, and the inputs use the relaxed compare validation.  */
static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}
867 
/* Instantiate gen_<name> wrappers around gen_ieee_compare for the
   T-format comparison insns.  */
#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)
878 
/* Expand the 8-bit ZAPNOT byte-select literal into a 64-bit mask:
   each set bit of LIT becomes a byte of ones at the same position.  */
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;

    /* Build from the top byte down so each iteration shifts the
       accumulated mask into place.  */
    for (int i = 7; i >= 0; --i) {
        mask <<= 8;
        if (lit & (1u << i)) {
            mask |= 0xff;
        }
    }
    return mask;
}
891 
892 /* Implement zapnot with an immediate operand, which expands to some
893    form of immediate AND.  This is a basic building block in the
894    definition of many of the other byte manipulation instructions.  */
895 static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
896 {
897     switch (lit) {
898     case 0x00:
899         tcg_gen_movi_i64(dest, 0);
900         break;
901     case 0x01:
902         tcg_gen_ext8u_i64(dest, src);
903         break;
904     case 0x03:
905         tcg_gen_ext16u_i64(dest, src);
906         break;
907     case 0x0f:
908         tcg_gen_ext32u_i64(dest, src);
909         break;
910     case 0xff:
911         tcg_gen_mov_i64(dest, src);
912         break;
913     default:
914         tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
915         break;
916     }
917 }
918 
/* EXTWH, EXTLH, EXTQH: extract the high part of a datum straddling an
   unaligned address — shift VA left by 64 - 8*(RB & 7) bits, then keep
   only the bytes selected by BYTE_MASK.  */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        /* (64 - lit*8) mod 64 gives the left-shift amount; a shift of
           0 with lit != 0 means everything shifts out.  */
        int pos = (64 - lit * 8) & 0x3f;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_deposit_z_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        /* Shift amount is (-8 * rb) & 63, computed at run time.  */
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}
941 
/* EXTBL, EXTWL, EXTLL, EXTQL: extract the low part — shift VA right by
   8*(RB & 7) bits and keep the bytes selected by BYTE_MASK.  */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len >= 64) {
            len = 64 - pos;
        }
        /* extract both shifts and masks, so no separate zapnot here.  */
        tcg_gen_extract_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
        gen_zapnoti(vc, vc, byte_mask);
    }
}
962 
/* INSWH, INSLH, INSQH: position the high half of an unaligned insert —
   the zapped source shifted right by 64 - 8*(RB & 7) bits.  */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = 64 - (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_extract_i64(vc, va, pos, len - pos);
        } else {
            /* (lit & 7) == 0: nothing crosses into the high quadword.  */
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.  Do this
           portably by splitting the shift into two parts: shift_count-1 and 1.
           Arrange for the -1 by using ones-complement instead of
           twos-complement in the negation: ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}
1000 
1001 /* INSBL, INSWL, INSLL, INSQL */
1002 static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
1003                       uint8_t lit, uint8_t byte_mask)
1004 {
1005     if (islit) {
1006         int pos = (lit & 7) * 8;
1007         int len = cto32(byte_mask) * 8;
1008         if (pos + len > 64) {
1009             len = 64 - pos;
1010         }
1011         tcg_gen_deposit_z_i64(vc, va, pos, len);
1012     } else {
1013         TCGv tmp = tcg_temp_new();
1014         TCGv shift = tcg_temp_new();
1015 
1016         /* The instruction description has us left-shift the byte mask
1017            and extract bits <15:8> and apply that zap at the end.  This
1018            is equivalent to simply performing the zap first and shifting
1019            afterward.  */
1020         gen_zapnoti(tmp, va, byte_mask);
1021 
1022         tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
1023         tcg_gen_shli_i64(shift, shift, 3);
1024         tcg_gen_shl_i64(vc, tmp, shift);
1025         tcg_temp_free(shift);
1026         tcg_temp_free(tmp);
1027     }
1028 }
1029 
/* MSKWH, MSKLH, MSKQH: clear the high-part bytes of VA selected by
   BYTE_MASK shifted left by (Rb & 7), storing the result in VC.
   RB may be a literal (ISLIT/LIT).  */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        /* Constant offset: the bytes to clear are those of the shifted
           byte mask that land in <15:8>; zap with the complement.  */
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        /* Expand the byte mask into a 64-bit bit mask, then position it.  */
        tcg_gen_movi_i64(mask, zapnot_mask (byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        /* Clear the selected bits of VA.  */
        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}
1062 
1063 /* MSKBL, MSKWL, MSKLL, MSKQL */
1064 static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
1065                       uint8_t lit, uint8_t byte_mask)
1066 {
1067     if (islit) {
1068         gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
1069     } else {
1070         TCGv shift = tcg_temp_new();
1071         TCGv mask = tcg_temp_new();
1072 
1073         tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
1074         tcg_gen_shli_i64(shift, shift, 3);
1075         tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
1076         tcg_gen_shl_i64(mask, mask, shift);
1077 
1078         tcg_gen_andc_i64(vc, va, mask);
1079 
1080         tcg_temp_free(mask);
1081         tcg_temp_free(shift);
1082     }
1083 }
1084 
1085 static void gen_rx(DisasContext *ctx, int ra, int set)
1086 {
1087     TCGv tmp;
1088 
1089     if (ra != 31) {
1090         ld_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
1091     }
1092 
1093     tmp = tcg_const_i64(set);
1094     st_flag_byte(ctx->ir[ra], ENV_FLAG_RX_SHIFT);
1095     tcg_temp_free(tmp);
1096 }
1097 
/* Translate a CALL_PAL instruction with function code PALCODE.
   Trivial entry points are emulated inline; anything else falls
   through to do_call_pal, which either raises EXCP_CALL_PAL (user
   emulation) or branches into the PALcode image at ctx->palbr
   (system emulation).  Returns the DisasJumpType for the TB.  */
static DisasJumpType gen_call_pal(DisasContext *ctx, int palcode)
{
    /* We're emulating OSF/1 PALcode.  Many of these are trivial access
       to internal cpu registers.  */

    /* Unprivileged PAL call */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code */
    if (palcode < 0x40 && (ctx->tbflags & ENV_FLAG_PS_USER) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35:
            /* SWPIPL */
            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);

            /* But make sure and store only the 3 IPL bits from the user.  */
            {
                TCGv tmp = tcg_temp_new();
                tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
                st_flag_byte(tmp, ENV_FLAG_PS_SHIFT);
                tcg_temp_free(tmp);
            }

            /* Allow interrupts to be recognized right away.  */
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            return DISAS_PC_UPDATED_NOCHAIN;

        case 0x36:
            /* RDPS */
            ld_flag_byte(ctx->ir[IR_V0], ENV_FLAG_PS_SHIFT);
            break;

        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            /* The cpu_index lives in the CPUState that contains our
               CPUAlphaState, hence the negative offset from env.  */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        case 0x3E:
            /* WTINT */
            {
                TCGv_i32 tmp = tcg_const_i32(1);
                tcg_gen_st_i32(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                             offsetof(CPUState, halted));
                tcg_temp_free_i32(tmp);
            }
            tcg_gen_movi_i64(ctx->ir[IR_V0], 0);
            return gen_excp(ctx, EXCP_HALTED, 0);

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return DISAS_NEXT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->base.pc_next;
        uint64_t entry = ctx->palbr;

        if (ctx->tbflags & ENV_FLAG_PAL_MODE) {
            /* Low bit of exc_addr records that we came from PAL mode.  */
            exc_addr |= 1;
        } else {
            /* Entering PAL mode: set the mode flag byte.  */
            tcg_gen_movi_i64(tmp, 1);
            st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

        /* Vectors are 64 bytes apart: unprivileged entry points start
           at palbr + 0x2000, privileged at palbr + 0x1000.  */
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination is running in PALmode, we don't really
           need the page permissions check.  We'll see the existence of
           the page when we create the TB, and we'll flush all TBs if
           we change the PAL base register.  */
        if (!ctx->base.singlestep_enabled) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb(ctx->base.tb, 0);
            return DISAS_NORETURN;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return DISAS_PC_UPDATED;
        }
    }
#endif
}
1254 
1255 #ifndef CONFIG_USER_ONLY
1256 
1257 #define PR_LONG         0x200000
1258 
1259 static int cpu_pr_data(int pr)
1260 {
1261     switch (pr) {
1262     case  2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
1263     case  3: return offsetof(CPUAlphaState, trap_arg0);
1264     case  4: return offsetof(CPUAlphaState, trap_arg1);
1265     case  5: return offsetof(CPUAlphaState, trap_arg2);
1266     case  6: return offsetof(CPUAlphaState, exc_addr);
1267     case  7: return offsetof(CPUAlphaState, palbr);
1268     case  8: return offsetof(CPUAlphaState, ptbr);
1269     case  9: return offsetof(CPUAlphaState, vptptr);
1270     case 10: return offsetof(CPUAlphaState, unique);
1271     case 11: return offsetof(CPUAlphaState, sysval);
1272     case 12: return offsetof(CPUAlphaState, usp);
1273 
1274     case 40 ... 63:
1275         return offsetof(CPUAlphaState, scratch[pr - 40]);
1276 
1277     case 251:
1278         return offsetof(CPUAlphaState, alarm_expire);
1279     }
1280     return 0;
1281 }
1282 
/* MFPR: move internal processor register REGNO into VA.
   Returns DISAS_PC_STALE after an icount'd timer read so the TB ends
   at this instruction; otherwise DISAS_NEXT.  */
static DisasJumpType gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* Under icount, bracket the I/O access with gen_io_start
               and end the TB afterward (DISAS_PC_STALE).  */
            gen_io_start();
            helper(va);
            return DISAS_PC_STALE;
        } else {
            helper(va);
        }
        break;

    case 0: /* PS */
        ld_flag_byte(va, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        ld_flag_byte(va, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_LONG) {
            /* 32-bit backing field: load with sign extension.  */
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return DISAS_NEXT;
}
1333 
/* MTPR: move VB into internal processor register REGNO.
   Returns DISAS_PC_STALE (or ends the TB via gen_excp) for writes
   with side effects that invalidate the current TB; DISAS_NEXT
   otherwise.  */
static DisasJumpType gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    int data;
    DisasJumpType ret = DISAS_NEXT;

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT */
        /* Halt the vCPU and raise EXCP_HALTED to leave the cpu loop.  */
        {
            TCGv_i32 tmp = tcg_const_i32(1);
            tcg_gen_st_i32(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                         offsetof(CPUState, halted));
            tcg_temp_free_i32(tmp);
        }
        return gen_excp(ctx, EXCP_HALTED, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return DISAS_PC_STALE;

    case 251:
        /* ALARM */
        if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
            /* Under icount, the timer write must end the TB.  */
            gen_io_start();
            ret = DISAS_PC_STALE;
        }
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the TBs
           that ended with a CALL_PAL.  Since the base register usually only
           changes during boot, flushing everything works well.  */
        gen_helper_tb_flush(cpu_env);
        return DISAS_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    case 0: /* PS */
        st_flag_byte(vb, ENV_FLAG_PS_SHIFT);
        break;
    case 1: /* FEN */
        st_flag_byte(vb, ENV_FLAG_FEN_SHIFT);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_LONG) {
                /* 32-bit backing field: store the low 32 bits.  */
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return ret;
}
1412 #endif /* !USER_ONLY*/
1413 
/* Decode-time guard macros used inside translate_one: each jumps to
   the local invalid_opc (or raise_fen) label when its requirement is
   not met by the current instruction or CPU configuration.  */

/* Reject the literal-operand instruction format.  */
#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Require the CPU to implement the given AMASK feature bit.  */
#define REQUIRE_AMASK(FLAG)                     \
    do {                                        \
        if ((ctx->amask & AMASK_##FLAG) == 0) { \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Require the given TB flag(s) to be set for this translation.  */
#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tbflags & (FLAG)) == 0) {     \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Require the given register field to be R31.  */
#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

/* Require the FP enable flag; otherwise raise a FEN fault.  */
#define REQUIRE_FEN                             \
    do {                                        \
        if (!(ctx->tbflags & ENV_FLAG_FEN)) {   \
            goto raise_fen;                     \
        }                                       \
    } while (0)
1448 
1449 static DisasJumpType translate_one(DisasContext *ctx, uint32_t insn)
1450 {
1451     int32_t disp21, disp16, disp12 __attribute__((unused));
1452     uint16_t fn11;
1453     uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
1454     bool islit, real_islit;
1455     TCGv va, vb, vc, tmp, tmp2;
1456     TCGv_i32 t32;
1457     DisasJumpType ret;
1458 
1459     /* Decode all instruction fields */
1460     opc = extract32(insn, 26, 6);
1461     ra = extract32(insn, 21, 5);
1462     rb = extract32(insn, 16, 5);
1463     rc = extract32(insn, 0, 5);
1464     real_islit = islit = extract32(insn, 12, 1);
1465     lit = extract32(insn, 13, 8);
1466 
1467     disp21 = sextract32(insn, 0, 21);
1468     disp16 = sextract32(insn, 0, 16);
1469     disp12 = sextract32(insn, 0, 12);
1470 
1471     fn11 = extract32(insn, 5, 11);
1472     fpfn = extract32(insn, 5, 6);
1473     fn7 = extract32(insn, 5, 7);
1474 
1475     if (rb == 31 && !islit) {
1476         islit = true;
1477         lit = 0;
1478     }
1479 
1480     ret = DISAS_NEXT;
1481     switch (opc) {
1482     case 0x00:
1483         /* CALL_PAL */
1484         ret = gen_call_pal(ctx, insn & 0x03ffffff);
1485         break;
1486     case 0x01:
1487         /* OPC01 */
1488         goto invalid_opc;
1489     case 0x02:
1490         /* OPC02 */
1491         goto invalid_opc;
1492     case 0x03:
1493         /* OPC03 */
1494         goto invalid_opc;
1495     case 0x04:
1496         /* OPC04 */
1497         goto invalid_opc;
1498     case 0x05:
1499         /* OPC05 */
1500         goto invalid_opc;
1501     case 0x06:
1502         /* OPC06 */
1503         goto invalid_opc;
1504     case 0x07:
1505         /* OPC07 */
1506         goto invalid_opc;
1507 
1508     case 0x09:
1509         /* LDAH */
1510         disp16 = (uint32_t)disp16 << 16;
1511         /* fall through */
1512     case 0x08:
1513         /* LDA */
1514         va = dest_gpr(ctx, ra);
1515         /* It's worth special-casing immediate loads.  */
1516         if (rb == 31) {
1517             tcg_gen_movi_i64(va, disp16);
1518         } else {
1519             tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
1520         }
1521         break;
1522 
1523     case 0x0A:
1524         /* LDBU */
1525         REQUIRE_AMASK(BWX);
1526         gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
1527         break;
1528     case 0x0B:
1529         /* LDQ_U */
1530         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
1531         break;
1532     case 0x0C:
1533         /* LDWU */
1534         REQUIRE_AMASK(BWX);
1535         gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
1536         break;
1537     case 0x0D:
1538         /* STW */
1539         REQUIRE_AMASK(BWX);
1540         gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
1541         break;
1542     case 0x0E:
1543         /* STB */
1544         REQUIRE_AMASK(BWX);
1545         gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
1546         break;
1547     case 0x0F:
1548         /* STQ_U */
1549         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
1550         break;
1551 
1552     case 0x10:
1553         vc = dest_gpr(ctx, rc);
1554         vb = load_gpr_lit(ctx, rb, lit, islit);
1555 
1556         if (ra == 31) {
1557             if (fn7 == 0x00) {
1558                 /* Special case ADDL as SEXTL.  */
1559                 tcg_gen_ext32s_i64(vc, vb);
1560                 break;
1561             }
1562             if (fn7 == 0x29) {
1563                 /* Special case SUBQ as NEGQ.  */
1564                 tcg_gen_neg_i64(vc, vb);
1565                 break;
1566             }
1567         }
1568 
1569         va = load_gpr(ctx, ra);
1570         switch (fn7) {
1571         case 0x00:
1572             /* ADDL */
1573             tcg_gen_add_i64(vc, va, vb);
1574             tcg_gen_ext32s_i64(vc, vc);
1575             break;
1576         case 0x02:
1577             /* S4ADDL */
1578             tmp = tcg_temp_new();
1579             tcg_gen_shli_i64(tmp, va, 2);
1580             tcg_gen_add_i64(tmp, tmp, vb);
1581             tcg_gen_ext32s_i64(vc, tmp);
1582             tcg_temp_free(tmp);
1583             break;
1584         case 0x09:
1585             /* SUBL */
1586             tcg_gen_sub_i64(vc, va, vb);
1587             tcg_gen_ext32s_i64(vc, vc);
1588             break;
1589         case 0x0B:
1590             /* S4SUBL */
1591             tmp = tcg_temp_new();
1592             tcg_gen_shli_i64(tmp, va, 2);
1593             tcg_gen_sub_i64(tmp, tmp, vb);
1594             tcg_gen_ext32s_i64(vc, tmp);
1595             tcg_temp_free(tmp);
1596             break;
1597         case 0x0F:
1598             /* CMPBGE */
1599             if (ra == 31) {
1600                 /* Special case 0 >= X as X == 0.  */
1601                 gen_helper_cmpbe0(vc, vb);
1602             } else {
1603                 gen_helper_cmpbge(vc, va, vb);
1604             }
1605             break;
1606         case 0x12:
1607             /* S8ADDL */
1608             tmp = tcg_temp_new();
1609             tcg_gen_shli_i64(tmp, va, 3);
1610             tcg_gen_add_i64(tmp, tmp, vb);
1611             tcg_gen_ext32s_i64(vc, tmp);
1612             tcg_temp_free(tmp);
1613             break;
1614         case 0x1B:
1615             /* S8SUBL */
1616             tmp = tcg_temp_new();
1617             tcg_gen_shli_i64(tmp, va, 3);
1618             tcg_gen_sub_i64(tmp, tmp, vb);
1619             tcg_gen_ext32s_i64(vc, tmp);
1620             tcg_temp_free(tmp);
1621             break;
1622         case 0x1D:
1623             /* CMPULT */
1624             tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
1625             break;
1626         case 0x20:
1627             /* ADDQ */
1628             tcg_gen_add_i64(vc, va, vb);
1629             break;
1630         case 0x22:
1631             /* S4ADDQ */
1632             tmp = tcg_temp_new();
1633             tcg_gen_shli_i64(tmp, va, 2);
1634             tcg_gen_add_i64(vc, tmp, vb);
1635             tcg_temp_free(tmp);
1636             break;
1637         case 0x29:
1638             /* SUBQ */
1639             tcg_gen_sub_i64(vc, va, vb);
1640             break;
1641         case 0x2B:
1642             /* S4SUBQ */
1643             tmp = tcg_temp_new();
1644             tcg_gen_shli_i64(tmp, va, 2);
1645             tcg_gen_sub_i64(vc, tmp, vb);
1646             tcg_temp_free(tmp);
1647             break;
1648         case 0x2D:
1649             /* CMPEQ */
1650             tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
1651             break;
1652         case 0x32:
1653             /* S8ADDQ */
1654             tmp = tcg_temp_new();
1655             tcg_gen_shli_i64(tmp, va, 3);
1656             tcg_gen_add_i64(vc, tmp, vb);
1657             tcg_temp_free(tmp);
1658             break;
1659         case 0x3B:
1660             /* S8SUBQ */
1661             tmp = tcg_temp_new();
1662             tcg_gen_shli_i64(tmp, va, 3);
1663             tcg_gen_sub_i64(vc, tmp, vb);
1664             tcg_temp_free(tmp);
1665             break;
1666         case 0x3D:
1667             /* CMPULE */
1668             tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
1669             break;
1670         case 0x40:
1671             /* ADDL/V */
1672             tmp = tcg_temp_new();
1673             tcg_gen_ext32s_i64(tmp, va);
1674             tcg_gen_ext32s_i64(vc, vb);
1675             tcg_gen_add_i64(tmp, tmp, vc);
1676             tcg_gen_ext32s_i64(vc, tmp);
1677             gen_helper_check_overflow(cpu_env, vc, tmp);
1678             tcg_temp_free(tmp);
1679             break;
1680         case 0x49:
1681             /* SUBL/V */
1682             tmp = tcg_temp_new();
1683             tcg_gen_ext32s_i64(tmp, va);
1684             tcg_gen_ext32s_i64(vc, vb);
1685             tcg_gen_sub_i64(tmp, tmp, vc);
1686             tcg_gen_ext32s_i64(vc, tmp);
1687             gen_helper_check_overflow(cpu_env, vc, tmp);
1688             tcg_temp_free(tmp);
1689             break;
1690         case 0x4D:
1691             /* CMPLT */
1692             tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
1693             break;
1694         case 0x60:
1695             /* ADDQ/V */
1696             tmp = tcg_temp_new();
1697             tmp2 = tcg_temp_new();
1698             tcg_gen_eqv_i64(tmp, va, vb);
1699             tcg_gen_mov_i64(tmp2, va);
1700             tcg_gen_add_i64(vc, va, vb);
1701             tcg_gen_xor_i64(tmp2, tmp2, vc);
1702             tcg_gen_and_i64(tmp, tmp, tmp2);
1703             tcg_gen_shri_i64(tmp, tmp, 63);
1704             tcg_gen_movi_i64(tmp2, 0);
1705             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1706             tcg_temp_free(tmp);
1707             tcg_temp_free(tmp2);
1708             break;
1709         case 0x69:
1710             /* SUBQ/V */
1711             tmp = tcg_temp_new();
1712             tmp2 = tcg_temp_new();
1713             tcg_gen_xor_i64(tmp, va, vb);
1714             tcg_gen_mov_i64(tmp2, va);
1715             tcg_gen_sub_i64(vc, va, vb);
1716             tcg_gen_xor_i64(tmp2, tmp2, vc);
1717             tcg_gen_and_i64(tmp, tmp, tmp2);
1718             tcg_gen_shri_i64(tmp, tmp, 63);
1719             tcg_gen_movi_i64(tmp2, 0);
1720             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1721             tcg_temp_free(tmp);
1722             tcg_temp_free(tmp2);
1723             break;
1724         case 0x6D:
1725             /* CMPLE */
1726             tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
1727             break;
1728         default:
1729             goto invalid_opc;
1730         }
1731         break;
1732 
1733     case 0x11:
1734         if (fn7 == 0x20) {
1735             if (rc == 31) {
1736                 /* Special case BIS as NOP.  */
1737                 break;
1738             }
1739             if (ra == 31) {
1740                 /* Special case BIS as MOV.  */
1741                 vc = dest_gpr(ctx, rc);
1742                 if (islit) {
1743                     tcg_gen_movi_i64(vc, lit);
1744                 } else {
1745                     tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
1746                 }
1747                 break;
1748             }
1749         }
1750 
1751         vc = dest_gpr(ctx, rc);
1752         vb = load_gpr_lit(ctx, rb, lit, islit);
1753 
1754         if (fn7 == 0x28 && ra == 31) {
1755             /* Special case ORNOT as NOT.  */
1756             tcg_gen_not_i64(vc, vb);
1757             break;
1758         }
1759 
1760         va = load_gpr(ctx, ra);
1761         switch (fn7) {
1762         case 0x00:
1763             /* AND */
1764             tcg_gen_and_i64(vc, va, vb);
1765             break;
1766         case 0x08:
1767             /* BIC */
1768             tcg_gen_andc_i64(vc, va, vb);
1769             break;
1770         case 0x14:
1771             /* CMOVLBS */
1772             tmp = tcg_temp_new();
1773             tcg_gen_andi_i64(tmp, va, 1);
1774             tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
1775                                 vb, load_gpr(ctx, rc));
1776             tcg_temp_free(tmp);
1777             break;
1778         case 0x16:
1779             /* CMOVLBC */
1780             tmp = tcg_temp_new();
1781             tcg_gen_andi_i64(tmp, va, 1);
1782             tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
1783                                 vb, load_gpr(ctx, rc));
1784             tcg_temp_free(tmp);
1785             break;
1786         case 0x20:
1787             /* BIS */
1788             tcg_gen_or_i64(vc, va, vb);
1789             break;
1790         case 0x24:
1791             /* CMOVEQ */
1792             tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
1793                                 vb, load_gpr(ctx, rc));
1794             break;
1795         case 0x26:
1796             /* CMOVNE */
1797             tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
1798                                 vb, load_gpr(ctx, rc));
1799             break;
1800         case 0x28:
1801             /* ORNOT */
1802             tcg_gen_orc_i64(vc, va, vb);
1803             break;
1804         case 0x40:
1805             /* XOR */
1806             tcg_gen_xor_i64(vc, va, vb);
1807             break;
1808         case 0x44:
1809             /* CMOVLT */
1810             tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
1811                                 vb, load_gpr(ctx, rc));
1812             break;
1813         case 0x46:
1814             /* CMOVGE */
1815             tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
1816                                 vb, load_gpr(ctx, rc));
1817             break;
1818         case 0x48:
1819             /* EQV */
1820             tcg_gen_eqv_i64(vc, va, vb);
1821             break;
1822         case 0x61:
1823             /* AMASK */
1824             REQUIRE_REG_31(ra);
1825             tcg_gen_andi_i64(vc, vb, ~ctx->amask);
1826             break;
1827         case 0x64:
1828             /* CMOVLE */
1829             tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
1830                                 vb, load_gpr(ctx, rc));
1831             break;
1832         case 0x66:
1833             /* CMOVGT */
1834             tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
1835                                 vb, load_gpr(ctx, rc));
1836             break;
1837         case 0x6C:
1838             /* IMPLVER */
1839             REQUIRE_REG_31(ra);
1840             tcg_gen_movi_i64(vc, ctx->implver);
1841             break;
1842         default:
1843             goto invalid_opc;
1844         }
1845         break;
1846 
1847     case 0x12:
1848         vc = dest_gpr(ctx, rc);
1849         va = load_gpr(ctx, ra);
1850         switch (fn7) {
1851         case 0x02:
1852             /* MSKBL */
1853             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
1854             break;
1855         case 0x06:
1856             /* EXTBL */
1857             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
1858             break;
1859         case 0x0B:
1860             /* INSBL */
1861             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
1862             break;
1863         case 0x12:
1864             /* MSKWL */
1865             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
1866             break;
1867         case 0x16:
1868             /* EXTWL */
1869             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
1870             break;
1871         case 0x1B:
1872             /* INSWL */
1873             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
1874             break;
1875         case 0x22:
1876             /* MSKLL */
1877             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
1878             break;
1879         case 0x26:
1880             /* EXTLL */
1881             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
1882             break;
1883         case 0x2B:
1884             /* INSLL */
1885             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
1886             break;
1887         case 0x30:
1888             /* ZAP */
1889             if (islit) {
1890                 gen_zapnoti(vc, va, ~lit);
1891             } else {
1892                 gen_helper_zap(vc, va, load_gpr(ctx, rb));
1893             }
1894             break;
1895         case 0x31:
1896             /* ZAPNOT */
1897             if (islit) {
1898                 gen_zapnoti(vc, va, lit);
1899             } else {
1900                 gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
1901             }
1902             break;
1903         case 0x32:
1904             /* MSKQL */
1905             gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
1906             break;
1907         case 0x34:
1908             /* SRL */
1909             if (islit) {
1910                 tcg_gen_shri_i64(vc, va, lit & 0x3f);
1911             } else {
1912                 tmp = tcg_temp_new();
1913                 vb = load_gpr(ctx, rb);
1914                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1915                 tcg_gen_shr_i64(vc, va, tmp);
1916                 tcg_temp_free(tmp);
1917             }
1918             break;
1919         case 0x36:
1920             /* EXTQL */
1921             gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
1922             break;
1923         case 0x39:
1924             /* SLL */
1925             if (islit) {
1926                 tcg_gen_shli_i64(vc, va, lit & 0x3f);
1927             } else {
1928                 tmp = tcg_temp_new();
1929                 vb = load_gpr(ctx, rb);
1930                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1931                 tcg_gen_shl_i64(vc, va, tmp);
1932                 tcg_temp_free(tmp);
1933             }
1934             break;
1935         case 0x3B:
1936             /* INSQL */
1937             gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
1938             break;
1939         case 0x3C:
1940             /* SRA */
1941             if (islit) {
1942                 tcg_gen_sari_i64(vc, va, lit & 0x3f);
1943             } else {
1944                 tmp = tcg_temp_new();
1945                 vb = load_gpr(ctx, rb);
1946                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1947                 tcg_gen_sar_i64(vc, va, tmp);
1948                 tcg_temp_free(tmp);
1949             }
1950             break;
1951         case 0x52:
1952             /* MSKWH */
1953             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
1954             break;
1955         case 0x57:
1956             /* INSWH */
1957             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
1958             break;
1959         case 0x5A:
1960             /* EXTWH */
1961             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
1962             break;
1963         case 0x62:
1964             /* MSKLH */
1965             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
1966             break;
1967         case 0x67:
1968             /* INSLH */
1969             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
1970             break;
1971         case 0x6A:
1972             /* EXTLH */
1973             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
1974             break;
1975         case 0x72:
1976             /* MSKQH */
1977             gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
1978             break;
1979         case 0x77:
1980             /* INSQH */
1981             gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
1982             break;
1983         case 0x7A:
1984             /* EXTQH */
1985             gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
1986             break;
1987         default:
1988             goto invalid_opc;
1989         }
1990         break;
1991 
1992     case 0x13:
1993         vc = dest_gpr(ctx, rc);
1994         vb = load_gpr_lit(ctx, rb, lit, islit);
1995         va = load_gpr(ctx, ra);
1996         switch (fn7) {
1997         case 0x00:
1998             /* MULL */
1999             tcg_gen_mul_i64(vc, va, vb);
2000             tcg_gen_ext32s_i64(vc, vc);
2001             break;
2002         case 0x20:
2003             /* MULQ */
2004             tcg_gen_mul_i64(vc, va, vb);
2005             break;
2006         case 0x30:
2007             /* UMULH */
2008             tmp = tcg_temp_new();
2009             tcg_gen_mulu2_i64(tmp, vc, va, vb);
2010             tcg_temp_free(tmp);
2011             break;
2012         case 0x40:
2013             /* MULL/V */
2014             tmp = tcg_temp_new();
2015             tcg_gen_ext32s_i64(tmp, va);
2016             tcg_gen_ext32s_i64(vc, vb);
2017             tcg_gen_mul_i64(tmp, tmp, vc);
2018             tcg_gen_ext32s_i64(vc, tmp);
2019             gen_helper_check_overflow(cpu_env, vc, tmp);
2020             tcg_temp_free(tmp);
2021             break;
2022         case 0x60:
2023             /* MULQ/V */
2024             tmp = tcg_temp_new();
2025             tmp2 = tcg_temp_new();
2026             tcg_gen_muls2_i64(vc, tmp, va, vb);
2027             tcg_gen_sari_i64(tmp2, vc, 63);
2028             gen_helper_check_overflow(cpu_env, tmp, tmp2);
2029             tcg_temp_free(tmp);
2030             tcg_temp_free(tmp2);
2031             break;
2032         default:
2033             goto invalid_opc;
2034         }
2035         break;
2036 
2037     case 0x14:
2038         REQUIRE_AMASK(FIX);
2039         vc = dest_fpr(ctx, rc);
2040         switch (fpfn) { /* fn11 & 0x3F */
2041         case 0x04:
2042             /* ITOFS */
2043             REQUIRE_REG_31(rb);
2044             REQUIRE_FEN;
2045             t32 = tcg_temp_new_i32();
2046             va = load_gpr(ctx, ra);
2047             tcg_gen_extrl_i64_i32(t32, va);
2048             gen_helper_memory_to_s(vc, t32);
2049             tcg_temp_free_i32(t32);
2050             break;
2051         case 0x0A:
2052             /* SQRTF */
2053             REQUIRE_REG_31(ra);
2054             REQUIRE_FEN;
2055             vb = load_fpr(ctx, rb);
2056             gen_helper_sqrtf(vc, cpu_env, vb);
2057             break;
2058         case 0x0B:
2059             /* SQRTS */
2060             REQUIRE_REG_31(ra);
2061             REQUIRE_FEN;
2062             gen_sqrts(ctx, rb, rc, fn11);
2063             break;
2064         case 0x14:
2065             /* ITOFF */
2066             REQUIRE_REG_31(rb);
2067             REQUIRE_FEN;
2068             t32 = tcg_temp_new_i32();
2069             va = load_gpr(ctx, ra);
2070             tcg_gen_extrl_i64_i32(t32, va);
2071             gen_helper_memory_to_f(vc, t32);
2072             tcg_temp_free_i32(t32);
2073             break;
2074         case 0x24:
2075             /* ITOFT */
2076             REQUIRE_REG_31(rb);
2077             REQUIRE_FEN;
2078             va = load_gpr(ctx, ra);
2079             tcg_gen_mov_i64(vc, va);
2080             break;
2081         case 0x2A:
2082             /* SQRTG */
2083             REQUIRE_REG_31(ra);
2084             REQUIRE_FEN;
2085             vb = load_fpr(ctx, rb);
2086             gen_helper_sqrtg(vc, cpu_env, vb);
2087             break;
2088         case 0x02B:
2089             /* SQRTT */
2090             REQUIRE_REG_31(ra);
2091             REQUIRE_FEN;
2092             gen_sqrtt(ctx, rb, rc, fn11);
2093             break;
2094         default:
2095             goto invalid_opc;
2096         }
2097         break;
2098 
2099     case 0x15:
2100         /* VAX floating point */
2101         /* XXX: rounding mode and trap are ignored (!) */
2102         vc = dest_fpr(ctx, rc);
2103         vb = load_fpr(ctx, rb);
2104         va = load_fpr(ctx, ra);
2105         switch (fpfn) { /* fn11 & 0x3F */
2106         case 0x00:
2107             /* ADDF */
2108             REQUIRE_FEN;
2109             gen_helper_addf(vc, cpu_env, va, vb);
2110             break;
2111         case 0x01:
2112             /* SUBF */
2113             REQUIRE_FEN;
2114             gen_helper_subf(vc, cpu_env, va, vb);
2115             break;
2116         case 0x02:
2117             /* MULF */
2118             REQUIRE_FEN;
2119             gen_helper_mulf(vc, cpu_env, va, vb);
2120             break;
2121         case 0x03:
2122             /* DIVF */
2123             REQUIRE_FEN;
2124             gen_helper_divf(vc, cpu_env, va, vb);
2125             break;
2126         case 0x1E:
2127             /* CVTDG -- TODO */
2128             REQUIRE_REG_31(ra);
2129             goto invalid_opc;
2130         case 0x20:
2131             /* ADDG */
2132             REQUIRE_FEN;
2133             gen_helper_addg(vc, cpu_env, va, vb);
2134             break;
2135         case 0x21:
2136             /* SUBG */
2137             REQUIRE_FEN;
2138             gen_helper_subg(vc, cpu_env, va, vb);
2139             break;
2140         case 0x22:
2141             /* MULG */
2142             REQUIRE_FEN;
2143             gen_helper_mulg(vc, cpu_env, va, vb);
2144             break;
2145         case 0x23:
2146             /* DIVG */
2147             REQUIRE_FEN;
2148             gen_helper_divg(vc, cpu_env, va, vb);
2149             break;
2150         case 0x25:
2151             /* CMPGEQ */
2152             REQUIRE_FEN;
2153             gen_helper_cmpgeq(vc, cpu_env, va, vb);
2154             break;
2155         case 0x26:
2156             /* CMPGLT */
2157             REQUIRE_FEN;
2158             gen_helper_cmpglt(vc, cpu_env, va, vb);
2159             break;
2160         case 0x27:
2161             /* CMPGLE */
2162             REQUIRE_FEN;
2163             gen_helper_cmpgle(vc, cpu_env, va, vb);
2164             break;
2165         case 0x2C:
2166             /* CVTGF */
2167             REQUIRE_REG_31(ra);
2168             REQUIRE_FEN;
2169             gen_helper_cvtgf(vc, cpu_env, vb);
2170             break;
2171         case 0x2D:
2172             /* CVTGD -- TODO */
2173             REQUIRE_REG_31(ra);
2174             goto invalid_opc;
2175         case 0x2F:
2176             /* CVTGQ */
2177             REQUIRE_REG_31(ra);
2178             REQUIRE_FEN;
2179             gen_helper_cvtgq(vc, cpu_env, vb);
2180             break;
2181         case 0x3C:
2182             /* CVTQF */
2183             REQUIRE_REG_31(ra);
2184             REQUIRE_FEN;
2185             gen_helper_cvtqf(vc, cpu_env, vb);
2186             break;
2187         case 0x3E:
2188             /* CVTQG */
2189             REQUIRE_REG_31(ra);
2190             REQUIRE_FEN;
2191             gen_helper_cvtqg(vc, cpu_env, vb);
2192             break;
2193         default:
2194             goto invalid_opc;
2195         }
2196         break;
2197 
2198     case 0x16:
2199         /* IEEE floating-point */
2200         switch (fpfn) { /* fn11 & 0x3F */
2201         case 0x00:
2202             /* ADDS */
2203             REQUIRE_FEN;
2204             gen_adds(ctx, ra, rb, rc, fn11);
2205             break;
2206         case 0x01:
2207             /* SUBS */
2208             REQUIRE_FEN;
2209             gen_subs(ctx, ra, rb, rc, fn11);
2210             break;
2211         case 0x02:
2212             /* MULS */
2213             REQUIRE_FEN;
2214             gen_muls(ctx, ra, rb, rc, fn11);
2215             break;
2216         case 0x03:
2217             /* DIVS */
2218             REQUIRE_FEN;
2219             gen_divs(ctx, ra, rb, rc, fn11);
2220             break;
2221         case 0x20:
2222             /* ADDT */
2223             REQUIRE_FEN;
2224             gen_addt(ctx, ra, rb, rc, fn11);
2225             break;
2226         case 0x21:
2227             /* SUBT */
2228             REQUIRE_FEN;
2229             gen_subt(ctx, ra, rb, rc, fn11);
2230             break;
2231         case 0x22:
2232             /* MULT */
2233             REQUIRE_FEN;
2234             gen_mult(ctx, ra, rb, rc, fn11);
2235             break;
2236         case 0x23:
2237             /* DIVT */
2238             REQUIRE_FEN;
2239             gen_divt(ctx, ra, rb, rc, fn11);
2240             break;
2241         case 0x24:
2242             /* CMPTUN */
2243             REQUIRE_FEN;
2244             gen_cmptun(ctx, ra, rb, rc, fn11);
2245             break;
2246         case 0x25:
2247             /* CMPTEQ */
2248             REQUIRE_FEN;
2249             gen_cmpteq(ctx, ra, rb, rc, fn11);
2250             break;
2251         case 0x26:
2252             /* CMPTLT */
2253             REQUIRE_FEN;
2254             gen_cmptlt(ctx, ra, rb, rc, fn11);
2255             break;
2256         case 0x27:
2257             /* CMPTLE */
2258             REQUIRE_FEN;
2259             gen_cmptle(ctx, ra, rb, rc, fn11);
2260             break;
2261         case 0x2C:
2262             REQUIRE_REG_31(ra);
2263             REQUIRE_FEN;
2264             if (fn11 == 0x2AC || fn11 == 0x6AC) {
2265                 /* CVTST */
2266                 gen_cvtst(ctx, rb, rc, fn11);
2267             } else {
2268                 /* CVTTS */
2269                 gen_cvtts(ctx, rb, rc, fn11);
2270             }
2271             break;
2272         case 0x2F:
2273             /* CVTTQ */
2274             REQUIRE_REG_31(ra);
2275             REQUIRE_FEN;
2276             gen_cvttq(ctx, rb, rc, fn11);
2277             break;
2278         case 0x3C:
2279             /* CVTQS */
2280             REQUIRE_REG_31(ra);
2281             REQUIRE_FEN;
2282             gen_cvtqs(ctx, rb, rc, fn11);
2283             break;
2284         case 0x3E:
2285             /* CVTQT */
2286             REQUIRE_REG_31(ra);
2287             REQUIRE_FEN;
2288             gen_cvtqt(ctx, rb, rc, fn11);
2289             break;
2290         default:
2291             goto invalid_opc;
2292         }
2293         break;
2294 
2295     case 0x17:
2296         switch (fn11) {
2297         case 0x010:
2298             /* CVTLQ */
2299             REQUIRE_REG_31(ra);
2300             REQUIRE_FEN;
2301             vc = dest_fpr(ctx, rc);
2302             vb = load_fpr(ctx, rb);
2303             gen_cvtlq(vc, vb);
2304             break;
2305         case 0x020:
2306             /* CPYS */
2307             REQUIRE_FEN;
2308             if (rc == 31) {
2309                 /* Special case CPYS as FNOP.  */
2310             } else {
2311                 vc = dest_fpr(ctx, rc);
2312                 va = load_fpr(ctx, ra);
2313                 if (ra == rb) {
2314                     /* Special case CPYS as FMOV.  */
2315                     tcg_gen_mov_i64(vc, va);
2316                 } else {
2317                     vb = load_fpr(ctx, rb);
2318                     gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
2319                 }
2320             }
2321             break;
2322         case 0x021:
2323             /* CPYSN */
2324             REQUIRE_FEN;
2325             vc = dest_fpr(ctx, rc);
2326             vb = load_fpr(ctx, rb);
2327             va = load_fpr(ctx, ra);
2328             gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
2329             break;
2330         case 0x022:
2331             /* CPYSE */
2332             REQUIRE_FEN;
2333             vc = dest_fpr(ctx, rc);
2334             vb = load_fpr(ctx, rb);
2335             va = load_fpr(ctx, ra);
2336             gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
2337             break;
2338         case 0x024:
2339             /* MT_FPCR */
2340             REQUIRE_FEN;
2341             va = load_fpr(ctx, ra);
2342             gen_helper_store_fpcr(cpu_env, va);
2343             if (ctx->tb_rm == QUAL_RM_D) {
2344                 /* Re-do the copy of the rounding mode to fp_status
2345                    the next time we use dynamic rounding.  */
2346                 ctx->tb_rm = -1;
2347             }
2348             break;
2349         case 0x025:
2350             /* MF_FPCR */
2351             REQUIRE_FEN;
2352             va = dest_fpr(ctx, ra);
2353             gen_helper_load_fpcr(va, cpu_env);
2354             break;
2355         case 0x02A:
2356             /* FCMOVEQ */
2357             REQUIRE_FEN;
2358             gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
2359             break;
2360         case 0x02B:
2361             /* FCMOVNE */
2362             REQUIRE_FEN;
2363             gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
2364             break;
2365         case 0x02C:
2366             /* FCMOVLT */
2367             REQUIRE_FEN;
2368             gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
2369             break;
2370         case 0x02D:
2371             /* FCMOVGE */
2372             REQUIRE_FEN;
2373             gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
2374             break;
2375         case 0x02E:
2376             /* FCMOVLE */
2377             REQUIRE_FEN;
2378             gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
2379             break;
2380         case 0x02F:
2381             /* FCMOVGT */
2382             REQUIRE_FEN;
2383             gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
2384             break;
2385         case 0x030: /* CVTQL */
2386         case 0x130: /* CVTQL/V */
2387         case 0x530: /* CVTQL/SV */
2388             REQUIRE_REG_31(ra);
2389             REQUIRE_FEN;
2390             vc = dest_fpr(ctx, rc);
2391             vb = load_fpr(ctx, rb);
2392             gen_helper_cvtql(vc, cpu_env, vb);
2393             gen_fp_exc_raise(rc, fn11);
2394             break;
2395         default:
2396             goto invalid_opc;
2397         }
2398         break;
2399 
2400     case 0x18:
2401         switch ((uint16_t)disp16) {
2402         case 0x0000:
2403             /* TRAPB */
2404             /* No-op.  */
2405             break;
2406         case 0x0400:
2407             /* EXCB */
2408             /* No-op.  */
2409             break;
2410         case 0x4000:
2411             /* MB */
2412             tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2413             break;
2414         case 0x4400:
2415             /* WMB */
2416             tcg_gen_mb(TCG_MO_ST_ST | TCG_BAR_SC);
2417             break;
2418         case 0x8000:
2419             /* FETCH */
2420             /* No-op */
2421             break;
2422         case 0xA000:
2423             /* FETCH_M */
2424             /* No-op */
2425             break;
2426         case 0xC000:
2427             /* RPCC */
2428             va = dest_gpr(ctx, ra);
2429             if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
2430                 gen_io_start();
2431                 gen_helper_load_pcc(va, cpu_env);
2432                 ret = DISAS_PC_STALE;
2433             } else {
2434                 gen_helper_load_pcc(va, cpu_env);
2435             }
2436             break;
2437         case 0xE000:
2438             /* RC */
2439             gen_rx(ctx, ra, 0);
2440             break;
2441         case 0xE800:
2442             /* ECB */
2443             break;
2444         case 0xF000:
2445             /* RS */
2446             gen_rx(ctx, ra, 1);
2447             break;
2448         case 0xF800:
2449             /* WH64 */
2450             /* No-op */
2451             break;
2452         case 0xFC00:
2453             /* WH64EN */
2454             /* No-op */
2455             break;
2456         default:
2457             goto invalid_opc;
2458         }
2459         break;
2460 
2461     case 0x19:
2462         /* HW_MFPR (PALcode) */
2463 #ifndef CONFIG_USER_ONLY
2464         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2465         va = dest_gpr(ctx, ra);
2466         ret = gen_mfpr(ctx, va, insn & 0xffff);
2467         break;
2468 #else
2469         goto invalid_opc;
2470 #endif
2471 
2472     case 0x1A:
2473         /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
2474            prediction stack action, which of course we don't implement.  */
2475         vb = load_gpr(ctx, rb);
2476         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2477         if (ra != 31) {
2478             tcg_gen_movi_i64(ctx->ir[ra], ctx->base.pc_next);
2479         }
2480         ret = DISAS_PC_UPDATED;
2481         break;
2482 
2483     case 0x1B:
2484         /* HW_LD (PALcode) */
2485 #ifndef CONFIG_USER_ONLY
2486         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2487         {
2488             TCGv addr = tcg_temp_new();
2489             vb = load_gpr(ctx, rb);
2490             va = dest_gpr(ctx, ra);
2491 
2492             tcg_gen_addi_i64(addr, vb, disp12);
2493             switch ((insn >> 12) & 0xF) {
2494             case 0x0:
2495                 /* Longword physical access (hw_ldl/p) */
2496                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
2497                 break;
2498             case 0x1:
2499                 /* Quadword physical access (hw_ldq/p) */
2500                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEQ);
2501                 break;
2502             case 0x2:
2503                 /* Longword physical access with lock (hw_ldl_l/p) */
2504                 gen_qemu_ldl_l(va, addr, MMU_PHYS_IDX);
2505                 break;
2506             case 0x3:
2507                 /* Quadword physical access with lock (hw_ldq_l/p) */
2508                 gen_qemu_ldq_l(va, addr, MMU_PHYS_IDX);
2509                 break;
2510             case 0x4:
2511                 /* Longword virtual PTE fetch (hw_ldl/v) */
2512                 goto invalid_opc;
2513             case 0x5:
2514                 /* Quadword virtual PTE fetch (hw_ldq/v) */
2515                 goto invalid_opc;
2516                 break;
2517             case 0x6:
2518                 /* Invalid */
2519                 goto invalid_opc;
2520             case 0x7:
                /* Invalid */
2522                 goto invalid_opc;
2523             case 0x8:
2524                 /* Longword virtual access (hw_ldl) */
2525                 goto invalid_opc;
2526             case 0x9:
2527                 /* Quadword virtual access (hw_ldq) */
2528                 goto invalid_opc;
2529             case 0xA:
2530                 /* Longword virtual access with protection check (hw_ldl/w) */
2531                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
2532                 break;
2533             case 0xB:
2534                 /* Quadword virtual access with protection check (hw_ldq/w) */
2535                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
2536                 break;
2537             case 0xC:
2538                 /* Longword virtual access with alt access mode (hw_ldl/a)*/
2539                 goto invalid_opc;
2540             case 0xD:
2541                 /* Quadword virtual access with alt access mode (hw_ldq/a) */
2542                 goto invalid_opc;
2543             case 0xE:
2544                 /* Longword virtual access with alternate access mode and
2545                    protection checks (hw_ldl/wa) */
2546                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
2547                 break;
2548             case 0xF:
2549                 /* Quadword virtual access with alternate access mode and
2550                    protection checks (hw_ldq/wa) */
2551                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
2552                 break;
2553             }
2554             tcg_temp_free(addr);
2555             break;
2556         }
2557 #else
2558         goto invalid_opc;
2559 #endif
2560 
2561     case 0x1C:
2562         vc = dest_gpr(ctx, rc);
2563         if (fn7 == 0x70) {
2564             /* FTOIT */
2565             REQUIRE_AMASK(FIX);
2566             REQUIRE_REG_31(rb);
2567             va = load_fpr(ctx, ra);
2568             tcg_gen_mov_i64(vc, va);
2569             break;
2570         } else if (fn7 == 0x78) {
2571             /* FTOIS */
2572             REQUIRE_AMASK(FIX);
2573             REQUIRE_REG_31(rb);
2574             t32 = tcg_temp_new_i32();
2575             va = load_fpr(ctx, ra);
2576             gen_helper_s_to_memory(t32, va);
2577             tcg_gen_ext_i32_i64(vc, t32);
2578             tcg_temp_free_i32(t32);
2579             break;
2580         }
2581 
2582         vb = load_gpr_lit(ctx, rb, lit, islit);
2583         switch (fn7) {
2584         case 0x00:
2585             /* SEXTB */
2586             REQUIRE_AMASK(BWX);
2587             REQUIRE_REG_31(ra);
2588             tcg_gen_ext8s_i64(vc, vb);
2589             break;
2590         case 0x01:
2591             /* SEXTW */
2592             REQUIRE_AMASK(BWX);
2593             REQUIRE_REG_31(ra);
2594             tcg_gen_ext16s_i64(vc, vb);
2595             break;
2596         case 0x30:
2597             /* CTPOP */
2598             REQUIRE_AMASK(CIX);
2599             REQUIRE_REG_31(ra);
2600             REQUIRE_NO_LIT;
2601             tcg_gen_ctpop_i64(vc, vb);
2602             break;
2603         case 0x31:
2604             /* PERR */
2605             REQUIRE_AMASK(MVI);
2606             REQUIRE_NO_LIT;
2607             va = load_gpr(ctx, ra);
2608             gen_helper_perr(vc, va, vb);
2609             break;
2610         case 0x32:
2611             /* CTLZ */
2612             REQUIRE_AMASK(CIX);
2613             REQUIRE_REG_31(ra);
2614             REQUIRE_NO_LIT;
2615             tcg_gen_clzi_i64(vc, vb, 64);
2616             break;
2617         case 0x33:
2618             /* CTTZ */
2619             REQUIRE_AMASK(CIX);
2620             REQUIRE_REG_31(ra);
2621             REQUIRE_NO_LIT;
2622             tcg_gen_ctzi_i64(vc, vb, 64);
2623             break;
2624         case 0x34:
2625             /* UNPKBW */
2626             REQUIRE_AMASK(MVI);
2627             REQUIRE_REG_31(ra);
2628             REQUIRE_NO_LIT;
2629             gen_helper_unpkbw(vc, vb);
2630             break;
2631         case 0x35:
2632             /* UNPKBL */
2633             REQUIRE_AMASK(MVI);
2634             REQUIRE_REG_31(ra);
2635             REQUIRE_NO_LIT;
2636             gen_helper_unpkbl(vc, vb);
2637             break;
2638         case 0x36:
2639             /* PKWB */
2640             REQUIRE_AMASK(MVI);
2641             REQUIRE_REG_31(ra);
2642             REQUIRE_NO_LIT;
2643             gen_helper_pkwb(vc, vb);
2644             break;
2645         case 0x37:
2646             /* PKLB */
2647             REQUIRE_AMASK(MVI);
2648             REQUIRE_REG_31(ra);
2649             REQUIRE_NO_LIT;
2650             gen_helper_pklb(vc, vb);
2651             break;
2652         case 0x38:
2653             /* MINSB8 */
2654             REQUIRE_AMASK(MVI);
2655             va = load_gpr(ctx, ra);
2656             gen_helper_minsb8(vc, va, vb);
2657             break;
2658         case 0x39:
2659             /* MINSW4 */
2660             REQUIRE_AMASK(MVI);
2661             va = load_gpr(ctx, ra);
2662             gen_helper_minsw4(vc, va, vb);
2663             break;
2664         case 0x3A:
2665             /* MINUB8 */
2666             REQUIRE_AMASK(MVI);
2667             va = load_gpr(ctx, ra);
2668             gen_helper_minub8(vc, va, vb);
2669             break;
2670         case 0x3B:
2671             /* MINUW4 */
2672             REQUIRE_AMASK(MVI);
2673             va = load_gpr(ctx, ra);
2674             gen_helper_minuw4(vc, va, vb);
2675             break;
2676         case 0x3C:
2677             /* MAXUB8 */
2678             REQUIRE_AMASK(MVI);
2679             va = load_gpr(ctx, ra);
2680             gen_helper_maxub8(vc, va, vb);
2681             break;
2682         case 0x3D:
2683             /* MAXUW4 */
2684             REQUIRE_AMASK(MVI);
2685             va = load_gpr(ctx, ra);
2686             gen_helper_maxuw4(vc, va, vb);
2687             break;
2688         case 0x3E:
2689             /* MAXSB8 */
2690             REQUIRE_AMASK(MVI);
2691             va = load_gpr(ctx, ra);
2692             gen_helper_maxsb8(vc, va, vb);
2693             break;
2694         case 0x3F:
2695             /* MAXSW4 */
2696             REQUIRE_AMASK(MVI);
2697             va = load_gpr(ctx, ra);
2698             gen_helper_maxsw4(vc, va, vb);
2699             break;
2700         default:
2701             goto invalid_opc;
2702         }
2703         break;
2704 
2705     case 0x1D:
2706         /* HW_MTPR (PALcode) */
2707 #ifndef CONFIG_USER_ONLY
2708         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2709         vb = load_gpr(ctx, rb);
2710         ret = gen_mtpr(ctx, vb, insn & 0xffff);
2711         break;
2712 #else
2713         goto invalid_opc;
2714 #endif
2715 
2716     case 0x1E:
2717         /* HW_RET (PALcode) */
2718 #ifndef CONFIG_USER_ONLY
2719         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2720         if (rb == 31) {
2721             /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
2722                address from EXC_ADDR.  This turns out to be useful for our
2723                emulation PALcode, so continue to accept it.  */
2724             ctx->lit = vb = tcg_temp_new();
2725             tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
2726         } else {
2727             vb = load_gpr(ctx, rb);
2728         }
2729         tcg_gen_movi_i64(cpu_lock_addr, -1);
2730         tmp = tcg_temp_new();
2731         tcg_gen_movi_i64(tmp, 0);
2732         st_flag_byte(tmp, ENV_FLAG_RX_SHIFT);
2733         tcg_gen_andi_i64(tmp, vb, 1);
2734         st_flag_byte(tmp, ENV_FLAG_PAL_SHIFT);
2735         tcg_temp_free(tmp);
2736         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2737         /* Allow interrupts to be recognized right away.  */
2738         ret = DISAS_PC_UPDATED_NOCHAIN;
2739         break;
2740 #else
2741         goto invalid_opc;
2742 #endif
2743 
2744     case 0x1F:
2745         /* HW_ST (PALcode) */
2746 #ifndef CONFIG_USER_ONLY
2747         REQUIRE_TB_FLAG(ENV_FLAG_PAL_MODE);
2748         {
2749             switch ((insn >> 12) & 0xF) {
2750             case 0x0:
2751                 /* Longword physical access */
2752                 va = load_gpr(ctx, ra);
2753                 vb = load_gpr(ctx, rb);
2754                 tmp = tcg_temp_new();
2755                 tcg_gen_addi_i64(tmp, vb, disp12);
2756                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
2757                 tcg_temp_free(tmp);
2758                 break;
2759             case 0x1:
2760                 /* Quadword physical access */
2761                 va = load_gpr(ctx, ra);
2762                 vb = load_gpr(ctx, rb);
2763                 tmp = tcg_temp_new();
2764                 tcg_gen_addi_i64(tmp, vb, disp12);
2765                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEQ);
2766                 tcg_temp_free(tmp);
2767                 break;
2768             case 0x2:
2769                 /* Longword physical access with lock */
2770                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2771                                             MMU_PHYS_IDX, MO_LESL);
2772                 break;
2773             case 0x3:
2774                 /* Quadword physical access with lock */
2775                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2776                                             MMU_PHYS_IDX, MO_LEQ);
2777                 break;
2778             case 0x4:
2779                 /* Longword virtual access */
2780                 goto invalid_opc;
2781             case 0x5:
2782                 /* Quadword virtual access */
2783                 goto invalid_opc;
2784             case 0x6:
2785                 /* Invalid */
2786                 goto invalid_opc;
2787             case 0x7:
2788                 /* Invalid */
2789                 goto invalid_opc;
2790             case 0x8:
2791                 /* Invalid */
2792                 goto invalid_opc;
2793             case 0x9:
2794                 /* Invalid */
2795                 goto invalid_opc;
2796             case 0xA:
2797                 /* Invalid */
2798                 goto invalid_opc;
2799             case 0xB:
2800                 /* Invalid */
2801                 goto invalid_opc;
2802             case 0xC:
2803                 /* Longword virtual access with alternate access mode */
2804                 goto invalid_opc;
2805             case 0xD:
2806                 /* Quadword virtual access with alternate access mode */
2807                 goto invalid_opc;
2808             case 0xE:
2809                 /* Invalid */
2810                 goto invalid_opc;
2811             case 0xF:
2812                 /* Invalid */
2813                 goto invalid_opc;
2814             }
2815             break;
2816         }
2817 #else
2818         goto invalid_opc;
2819 #endif
2820     case 0x20:
2821         /* LDF */
2822         REQUIRE_FEN;
2823         gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2824         break;
2825     case 0x21:
2826         /* LDG */
2827         REQUIRE_FEN;
2828         gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2829         break;
2830     case 0x22:
2831         /* LDS */
2832         REQUIRE_FEN;
2833         gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2834         break;
2835     case 0x23:
2836         /* LDT */
2837         REQUIRE_FEN;
2838         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2839         break;
2840     case 0x24:
2841         /* STF */
2842         REQUIRE_FEN;
2843         gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
2844         break;
2845     case 0x25:
2846         /* STG */
2847         REQUIRE_FEN;
2848         gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
2849         break;
2850     case 0x26:
2851         /* STS */
2852         REQUIRE_FEN;
2853         gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
2854         break;
2855     case 0x27:
2856         /* STT */
2857         REQUIRE_FEN;
2858         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
2859         break;
2860     case 0x28:
2861         /* LDL */
2862         gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2863         break;
2864     case 0x29:
2865         /* LDQ */
2866         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2867         break;
2868     case 0x2A:
2869         /* LDL_L */
2870         gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2871         break;
2872     case 0x2B:
2873         /* LDQ_L */
2874         gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2875         break;
2876     case 0x2C:
2877         /* STL */
2878         gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
2879         break;
2880     case 0x2D:
2881         /* STQ */
2882         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
2883         break;
2884     case 0x2E:
2885         /* STL_C */
2886         ret = gen_store_conditional(ctx, ra, rb, disp16,
2887                                     ctx->mem_idx, MO_LESL);
2888         break;
2889     case 0x2F:
2890         /* STQ_C */
2891         ret = gen_store_conditional(ctx, ra, rb, disp16,
2892                                     ctx->mem_idx, MO_LEQ);
2893         break;
2894     case 0x30:
2895         /* BR */
2896         ret = gen_bdirect(ctx, ra, disp21);
2897         break;
2898     case 0x31: /* FBEQ */
2899         REQUIRE_FEN;
2900         ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
2901         break;
2902     case 0x32: /* FBLT */
2903         REQUIRE_FEN;
2904         ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
2905         break;
2906     case 0x33: /* FBLE */
2907         REQUIRE_FEN;
2908         ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
2909         break;
2910     case 0x34:
2911         /* BSR */
2912         ret = gen_bdirect(ctx, ra, disp21);
2913         break;
2914     case 0x35: /* FBNE */
2915         REQUIRE_FEN;
2916         ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
2917         break;
2918     case 0x36: /* FBGE */
2919         REQUIRE_FEN;
2920         ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
2921         break;
2922     case 0x37: /* FBGT */
2923         REQUIRE_FEN;
2924         ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
2925         break;
2926     case 0x38:
2927         /* BLBC */
2928         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2929         break;
2930     case 0x39:
2931         /* BEQ */
2932         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2933         break;
2934     case 0x3A:
2935         /* BLT */
2936         ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2937         break;
2938     case 0x3B:
2939         /* BLE */
2940         ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2941         break;
2942     case 0x3C:
2943         /* BLBS */
2944         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2945         break;
2946     case 0x3D:
2947         /* BNE */
2948         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2949         break;
2950     case 0x3E:
2951         /* BGE */
2952         ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2953         break;
2954     case 0x3F:
2955         /* BGT */
2956         ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2957         break;
2958     invalid_opc:
2959         ret = gen_invalid(ctx);
2960         break;
2961     raise_fen:
2962         ret = gen_excp(ctx, EXCP_FEN, 0);
2963         break;
2964     }
2965 
2966     return ret;
2967 }
2968 
2969 static void alpha_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cpu)
2970 {
2971     DisasContext *ctx = container_of(dcbase, DisasContext, base);
2972     CPUAlphaState *env = cpu->env_ptr;
2973     int64_t bound;
2974 
2975     ctx->tbflags = ctx->base.tb->flags;
2976     ctx->mem_idx = cpu_mmu_index(env, false);
2977     ctx->implver = env->implver;
2978     ctx->amask = env->amask;
2979 
2980 #ifdef CONFIG_USER_ONLY
2981     ctx->ir = cpu_std_ir;
2982 #else
2983     ctx->palbr = env->palbr;
2984     ctx->ir = (ctx->tbflags & ENV_FLAG_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
2985 #endif
2986 
2987     /* ??? Every TB begins with unset rounding mode, to be initialized on
2988        the first fp insn of the TB.  Alternately we could define a proper
2989        default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
2990        to reset the FP_STATUS to that default at the end of any TB that
2991        changes the default.  We could even (gasp) dynamiclly figure out
2992        what default would be most efficient given the running program.  */
2993     ctx->tb_rm = -1;
2994     /* Similarly for flush-to-zero.  */
2995     ctx->tb_ftz = -1;
2996 
2997     ctx->zero = NULL;
2998     ctx->sink = NULL;
2999     ctx->lit = NULL;
3000 
3001     /* Bound the number of insns to execute to those left on the page.  */
3002     bound = -(ctx->base.pc_first | TARGET_PAGE_MASK) / 4;
3003     ctx->base.max_insns = MIN(ctx->base.max_insns, bound);
3004 }
3005 
/* No per-TB prologue is required for Alpha; empty hook for TranslatorOps.  */
static void alpha_tr_tb_start(DisasContextBase *db, CPUState *cpu)
{
}
3009 
/* Record the PC of the insn about to be translated; this is the value
   recovered as data[0] by restore_state_to_opc on exception unwind.  */
static void alpha_tr_insn_start(DisasContextBase *dcbase, CPUState *cpu)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
3014 
/* Emit an EXCP_DEBUG exception for a breakpoint hit at the current PC.
   Returns true to tell the translator loop the insn was "handled".  */
static bool alpha_tr_breakpoint_check(DisasContextBase *dcbase, CPUState *cpu,
                                      const CPUBreakpoint *bp)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    /* gen_excp raises EXCP_DEBUG at pc_next and ends the TB.  */
    ctx->base.is_jmp = gen_excp(ctx, EXCP_DEBUG, 0);

    /* The address covered by the breakpoint must be included in
       [tb->pc, tb->pc + tb->size) in order to for it to be
       properly cleared -- thus we increment the PC here so that
       the logic setting tb->size below does the right thing.  */
    ctx->base.pc_next += 4;
    return true;
}
3029 
/* Fetch and translate a single 4-byte Alpha insn at pc_next.
   pc_next is advanced *before* translate_one so that PC-relative
   operations inside translate_one see the address of the next insn.  */
static void alpha_tr_translate_insn(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUAlphaState *env = cpu->env_ptr;
    uint32_t insn = translator_ldl(env, ctx->base.pc_next);

    ctx->base.pc_next += 4;
    ctx->base.is_jmp = translate_one(ctx, insn);

    /* Release the per-insn scratch temporaries (zero/sink/lit).  */
    free_context_temps(ctx);
    translator_loop_temp_check(&ctx->base);
}
3042 
/* Emit the TB epilogue according to how translation ended (is_jmp).
   The cases deliberately fall through: each later case is the fallback
   path for the one above it when its fast exit cannot be used.  */
static void alpha_tr_tb_stop(DisasContextBase *dcbase, CPUState *cpu)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);

    switch (ctx->base.is_jmp) {
    case DISAS_NORETURN:
        /* The TB already ended with an exception or direct jump.  */
        break;
    case DISAS_TOO_MANY:
        /* Insn budget exhausted; chain directly to the next TB when the
           target address allows it, else fall back to the stale-PC path.  */
        if (use_goto_tb(ctx, ctx->base.pc_next)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
            tcg_gen_exit_tb(ctx->base.tb, 0);
        }
        /* FALLTHRU */
    case DISAS_PC_STALE:
        /* cpu_pc does not yet hold the next PC; store it now.  */
        tcg_gen_movi_i64(cpu_pc, ctx->base.pc_next);
        /* FALLTHRU */
    case DISAS_PC_UPDATED:
        /* cpu_pc is valid; try a TB-cache lookup unless single-stepping.  */
        if (!ctx->base.singlestep_enabled) {
            tcg_gen_lookup_and_goto_ptr();
            break;
        }
        /* FALLTHRU */
    case DISAS_PC_UPDATED_NOCHAIN:
        /* Return to the main loop: raise EXCP_DEBUG when single-stepping,
           otherwise a plain unchained exit.  */
        if (ctx->base.singlestep_enabled) {
            gen_excp_1(EXCP_DEBUG, 0);
        } else {
            tcg_gen_exit_tb(NULL, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}
3077 
/* Log the guest symbol (if known) and disassembly of the translated TB.  */
static void alpha_tr_disas_log(const DisasContextBase *dcbase, CPUState *cpu)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cpu, dcbase->pc_first, dcbase->tb->size);
}
3083 
/* Hooks invoked by the generic translator loop for the Alpha target.  */
static const TranslatorOps alpha_tr_ops = {
    .init_disas_context = alpha_tr_init_disas_context,
    .tb_start           = alpha_tr_tb_start,
    .insn_start         = alpha_tr_insn_start,
    .breakpoint_check   = alpha_tr_breakpoint_check,
    .translate_insn     = alpha_tr_translate_insn,
    .tb_stop            = alpha_tr_tb_stop,
    .disas_log          = alpha_tr_disas_log,
};
3093 
/* Target entry point: translate one TB by running the generic
   translator loop with the Alpha hook table above.  */
void gen_intermediate_code(CPUState *cpu, TranslationBlock *tb, int max_insns)
{
    DisasContext dc;
    translator_loop(&alpha_tr_ops, &dc.base, cpu, tb, max_insns);
}
3099 
/* Restore CPU state after an exception inside a TB: data[0] is the
   insn-start PC recorded by alpha_tr_insn_start.  */
void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
}
3105