/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "disas/disas.h"
#include "qemu/host-utils.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/log.h"


#undef ALPHA_DEBUG_DISAS
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif

typedef struct DisasContext DisasContext;
struct DisasContext {
    struct TranslationBlock *tb;
    uint64_t pc;
#ifndef CONFIG_USER_ONLY
    uint64_t palbr;
#endif
    int mem_idx;

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;

    /* implver value for this CPU.  */
    int implver;

    /* The set of registers active in the current context.  */
    TCGv *ir;

    /* Temporaries for $31 and $f31 as source and destination.  */
    TCGv zero;
    TCGv sink;
    /* Temporary for immediate constants.  */
    TCGv lit;

    bool singlestep_enabled;
};

/* Return values from translate_one, indicating the state of the TB.
   Note that zero indicates that we are not exiting the TB.  */

typedef enum {
    NO_EXIT,

    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,

    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,

    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,

    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;

/* global register indexes */
static TCGv_env cpu_env;
static TCGv cpu_std_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock_addr;
static TCGv cpu_lock_value;

#ifndef CONFIG_USER_ONLY
static TCGv cpu_pal_ir[31];
#endif

#include "exec/gen-icount.h"

void alpha_translate_init(void)
{
#define DEF_VAR(V)  { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    static bool done_init = false;
    int i;

    if (done_init) {
        return;
    }
    done_init = true;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}

static TCGv load_zero(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->zero)) {
        ctx->zero = tcg_const_i64(0);
    }
    return ctx->zero;
}

static TCGv dest_sink(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->sink)) {
        ctx->sink = tcg_temp_new();
    }
    return ctx->sink;
}

static void free_context_temps(DisasContext *ctx)
{
    if (!TCGV_IS_UNUSED_I64(ctx->sink)) {
        tcg_gen_discard_i64(ctx->sink);
        tcg_temp_free(ctx->sink);
        TCGV_UNUSED_I64(ctx->sink);
    }
    if (!TCGV_IS_UNUSED_I64(ctx->zero)) {
        tcg_temp_free(ctx->zero);
        TCGV_UNUSED_I64(ctx->zero);
    }
    if (!TCGV_IS_UNUSED_I64(ctx->lit)) {
        tcg_temp_free(ctx->lit);
        TCGV_UNUSED_I64(ctx->lit);
    }
}

static TCGv load_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
                         uint8_t lit, bool islit)
{
    if (islit) {
        ctx->lit = tcg_const_i64(lit);
        return ctx->lit;
    } else if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static TCGv load_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static void gen_excp_1(int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(cpu_env, tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static ExitStatus gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_excp_1(exception, error_code);
    return EXIT_NORETURN;
}

static inline ExitStatus gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}

static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

static ExitStatus gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                        int32_t disp16, int mem_idx,
                                        TCGMemOp op)
{
    TCGLabel *lab_fail, *lab_done;
    TCGv addr, val;

    addr = tcg_temp_new_i64();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    free_context_temps(ctx);

    lab_fail = gen_new_label();
    lab_done = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);
    tcg_temp_free_i64(addr);

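    /* Emulate the store-conditional as an atomic compare-and-swap against
       the value observed by the preceding load-locked: the store happens
       only if memory still holds cpu_lock_value, and comparing VAL with
       cpu_lock_value afterward tells us whether it did.  */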
    val = tcg_temp_new_i64();
    tcg_gen_atomic_cmpxchg_i64(val, cpu_lock_addr, cpu_lock_value,
                               load_gpr(ctx, ra), mem_idx, op);
    free_context_temps(ctx);

    if (ra != 31) {
        tcg_gen_setcond_i64(TCG_COND_EQ, ctx->ir[ra], val, cpu_lock_value);
    }
    tcg_temp_free_i64(val);
    tcg_gen_br(lab_done);

    gen_set_label(lab_fail);
    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], 0);
    }

    gen_set_label(lab_done);
    tcg_gen_movi_i64(cpu_lock_addr, -1);
    return NO_EXIT;
}

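/* Return true if ADDR lies in the kernel-only superpage: an address that
   is sign-extended from the top of the virtual address space and has bits
   <42:41> equal to 2.  (On Alpha this region is conventionally known as
   KSEG; its translation is wired down, which is why use_goto_tb below can
   rely on its page permissions never changing.)  */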
static bool in_superpage(DisasContext *ctx, int64_t addr)
{
#ifndef CONFIG_USER_ONLY
    return ((ctx->tb->flags & TB_FLAGS_USER_MODE) == 0
            && addr >> TARGET_VIRT_ADDR_SPACE_BITS == -1
            && ((addr >> 41) & 3) == 2);
#else
    return false;
#endif
}

static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Suppress goto_tb in the case of single-stepping and I/O.  */
    if ((ctx->tb->cflags & CF_LAST_IO)
        || ctx->singlestep_enabled || singlestep) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    /* If the destination is in the superpage, the page perms can't change.  */
    if (in_superpage(ctx, dest)) {
        return true;
    }
    /* Check for the dest on the same page as the start of the TB.  */
    return ((ctx->tb->pc ^ dest) & TARGET_PAGE_MASK) == 0;
#else
    return true;
#endif
}

static ExitStatus gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);

    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
    }

    /* Notice branch-to-next; used to initialize RA with the PC.  */
    if (disp == 0) {
        return NO_EXIT;
    } else if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);
        return EXIT_GOTO_TB;
    } else {
        tcg_gen_movi_i64(cpu_pc, dest);
        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                     TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);

        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + 1);

        return EXIT_GOTO_TB;
    } else {
        TCGv_i64 z = tcg_const_i64(0);
        TCGv_i64 d = tcg_const_i64(dest);
        TCGv_i64 p = tcg_const_i64(ctx->pc);

        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);

        tcg_temp_free_i64(z);
        tcg_temp_free_i64(d);
        tcg_temp_free_i64(p);
        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                            int32_t disp, int mask)
{
    TCGv cmp_tmp;

    if (mask) {
        cmp_tmp = tcg_temp_new();
        tcg_gen_andi_i64(cmp_tmp, load_gpr(ctx, ra), 1);
    } else {
        cmp_tmp = load_gpr(ctx, ra);
    }

    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

/* Fold -0.0 for comparison with COND.  */

static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
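        /* The setcond below yields 1 for any SRC other than -0.0 (which is
           encoded as just the sign bit, 0x8000000000000000); negating that
           gives an all-ones mask, so SRC passes through unchanged, while
           -0.0 itself selects a zero mask and thus compares as +0.0.  */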
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}

static ExitStatus gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}

#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */

static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "fpu/softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal as per spec.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}

static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}

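/* CVTLQ: reassemble a canonical quadword from the in-register longword
   format, which keeps L<31:30> at bits <63:62> and L<29:0> at bits
   <58:29> of the FP register; the shifts and masks below undo exactly
   that placement.  */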
static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_sari_i64(tmp, vb, 32);
    tcg_gen_shri_i64(vc, vb, 29);
    tcg_gen_andi_i64(tmp, tmp, (int32_t)0xc0000000);
    tcg_gen_andi_i64(vc, vc, 0x3fffffff);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(tmp);
}

static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)

static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    /* No need to set flushzero, since we have an integer output.  */
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}

static void gen_ieee_intcvt(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb, vc;

    gen_qual_roundmode(ctx, fn11);
    vb = load_fpr(ctx, rb);
    vc = dest_fpr(ctx, rc);

    /* The only exception that can be raised by integer conversion
       is inexact.  Thus we only need to worry about exceptions when
       inexact handling is requested.  */
    if (fn11 & QUAL_I) {
        helper(vc, cpu_env, vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(vc, cpu_env, vb);
    }
}

#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)

static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}

static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)

static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)

static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int i;

    for (i = 0; i < 8; ++i) {
        if ((lit >> i) & 1) {
            mask |= 0xffull << (i * 8);
        }
    }
    return mask;
}

/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
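/* For example, LIT == 0x01 keeps byte 0 only (mask 0xff, i.e. ext8u),
   and LIT == 0x0f keeps bytes 0-3 (mask 0xffffffff, i.e. ext32u); the
   switch below special-cases precisely those common masks.  */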
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}

/* EXTWH, EXTLH, EXTQH */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        tcg_gen_shli_i64(vc, va, (64 - lit * 8) & 0x3f);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        tcg_gen_shri_i64(vc, va, (lit & 7) * 8);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* INSWH, INSLH, INSQH */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask and extract
       bits <15:8> and apply that zap at the end.  This is equivalent to simply
       performing the zap first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        lit &= 7;
        if (unlikely(lit == 0)) {
            tcg_gen_movi_i64(vc, 0);
        } else {
            tcg_gen_shri_i64(vc, tmp, 64 - lit * 8);
        }
    } else {
        TCGv shift = tcg_temp_new();

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.  Do this
           portably by splitting the shift into two parts: shift_count-1 and 1.
           Arrange for the -1 by using ones-complement instead of
           twos-complement in the negation: ~(B * 8) & 63.  */
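        /* E.g. B & 7 == 1 wants a right shift of 56: ~(1 * 8) & 63 == 55,
           and the constant shift of 1 below supplies the missing bit;
           B & 7 == 0 gives 63 + 1 == 64, which correctly leaves zero.  */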

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}

/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask
       the same number of byte slots as the data and apply the zap
       at the end.  This is equivalent to simply performing the zap
       first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        tcg_gen_shli_i64(vc, tmp, (lit & 7) * 8);
    } else {
        TCGv shift = tcg_temp_new();
        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}

/* MSKWH, MSKLH, MSKQH */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

/* MSKBL, MSKWL, MSKLL, MSKQL */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

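/* Read the sticky interrupt flag into RA, then overwrite the flag with
   SET; this implements the read-and-clear / read-and-set pair (RC and
   RS).  */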
static void gen_rx(DisasContext *ctx, int ra, int set)
{
    TCGv_i32 tmp;

    if (ra != 31) {
        tcg_gen_ld8u_i64(ctx->ir[ra], cpu_env,
                         offsetof(CPUAlphaState, intr_flag));
    }

    tmp = tcg_const_i32(set);
    tcg_gen_st8_i32(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
    tcg_temp_free_i32(tmp);
}

static ExitStatus gen_call_pal(DisasContext *ctx, int palcode)
{
    /* We're emulating OSF/1 PALcode.  Many of these are trivial access
       to internal cpu registers.  */

    /* Unprivileged PAL call */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return NO_EXIT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code */
    if (palcode < 0x40 && (ctx->tb->flags & TB_FLAGS_USER_MODE) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35: {
            /* SWPIPL */
            TCGv tmp;

            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));

            /* But make sure and store only the 3 IPL bits from the user.  */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, ps));
            tcg_temp_free(tmp);
            break;
        }

        case 0x36:
            /* RDPS */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));
            break;
        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return NO_EXIT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->pc;
        uint64_t entry = ctx->palbr;

        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

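        /* Dispatch to the OSF/1 PALcode entry point: privileged vectors
           start at PALBR + 0x1000 and unprivileged ones at PALBR + 0x2000,
           with 64 bytes allotted to each vector.  */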
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination is running in PALmode, we don't really
           need the page permissions check.  We'll see the existence of
           the page when we create the TB, and we'll flush all TBs if
           we change the PAL base register.  */
        if (!ctx->singlestep_enabled && !(ctx->tb->cflags & CF_LAST_IO)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb((uintptr_t)ctx->tb);
            return EXIT_GOTO_TB;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return EXIT_PC_UPDATED;
        }
    }
#endif
}

#ifndef CONFIG_USER_ONLY

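/* Tags or'ed into the CPUAlphaState offset returned by cpu_pr_data,
   recording the width of the processor-register access.  */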
#define PR_BYTE         0x100000
#define PR_LONG         0x200000

static int cpu_pr_data(int pr)
{
    switch (pr) {
    case  0: return offsetof(CPUAlphaState, ps) | PR_BYTE;
    case  1: return offsetof(CPUAlphaState, fen) | PR_BYTE;
    case  2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
    case  3: return offsetof(CPUAlphaState, trap_arg0);
    case  4: return offsetof(CPUAlphaState, trap_arg1);
    case  5: return offsetof(CPUAlphaState, trap_arg2);
    case  6: return offsetof(CPUAlphaState, exc_addr);
    case  7: return offsetof(CPUAlphaState, palbr);
    case  8: return offsetof(CPUAlphaState, ptbr);
    case  9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 40 ... 63:
        return offsetof(CPUAlphaState, scratch[pr - 40]);

    case 251:
        return offsetof(CPUAlphaState, alarm_expire);
    }
    return 0;
}

static ExitStatus gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (use_icount) {
            gen_io_start();
            helper(va);
            gen_io_end();
            return EXIT_PC_STALE;
        } else {
            helper(va);
        }
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_BYTE) {
            tcg_gen_ld8u_i64(va, cpu_env, data & ~PR_BYTE);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return NO_EXIT;
}

static ExitStatus gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    TCGv tmp;
    int data;

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT */
        tmp = tcg_const_i64(1);
        tcg_gen_st32_i64(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                       offsetof(CPUState, halted));
        return gen_excp(ctx, EXCP_HLT, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return EXIT_PC_STALE;

    case 251:
        /* ALARM */
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the TBs
           that ended with a CALL_PAL.  Since the base register usually only
           changes during boot, flushing everything works well.  */
        gen_helper_tb_flush(cpu_env);
        return EXIT_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_BYTE) {
                tcg_gen_st8_i64(vb, cpu_env, data & ~PR_BYTE);
            } else if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return NO_EXIT;
}
#endif /* !CONFIG_USER_ONLY */

#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tb->flags & (FLAG)) == 0) {   \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    ExitStatus ret;

    /* Decode all instruction fields */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);

    if (rb == 31 && !islit) {
        islit = true;
        lit = 0;
    }

    ret = NO_EXIT;
    switch (opc) {
    case 0x00:
        /* CALL_PAL */
        ret = gen_call_pal(ctx, insn & 0x03ffffff);
        break;
    case 0x01:
        /* OPC01 */
        goto invalid_opc;
    case 0x02:
        /* OPC02 */
        goto invalid_opc;
    case 0x03:
        /* OPC03 */
        goto invalid_opc;
    case 0x04:
        /* OPC04 */
        goto invalid_opc;
    case 0x05:
        /* OPC05 */
        goto invalid_opc;
    case 0x06:
        /* OPC06 */
        goto invalid_opc;
    case 0x07:
        /* OPC07 */
        goto invalid_opc;

    case 0x09:
        /* LDAH */
        disp16 = (uint32_t)disp16 << 16;
        /* fall through */
    case 0x08:
        /* LDA */
        va = dest_gpr(ctx, ra);
        /* It's worth special-casing immediate loads.  */
        if (rb == 31) {
            tcg_gen_movi_i64(va, disp16);
        } else {
            tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
        }
        break;

    case 0x0A:
        /* LDBU */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
        break;
    case 0x0B:
        /* LDQ_U */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
        break;
    case 0x0C:
        /* LDWU */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
        break;
    case 0x0D:
        /* STW */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
        break;
    case 0x0E:
        /* STB */
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
        gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
        break;
    case 0x0F:
        /* STQ_U */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
        break;

    case 0x10:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);

        if (ra == 31) {
            if (fn7 == 0x00) {
                /* Special case ADDL as SEXTL.  */
                tcg_gen_ext32s_i64(vc, vb);
                break;
            }
            if (fn7 == 0x29) {
                /* Special case SUBQ as NEGQ.  */
                tcg_gen_neg_i64(vc, vb);
                break;
            }
        }

        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* ADDL */
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x02:
            /* S4ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x09:
            /* SUBL */
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x0B:
            /* S4SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x0F:
            /* CMPBGE */
            if (ra == 31) {
                /* Special case 0 >= X as X == 0.  */
                gen_helper_cmpbe0(vc, vb);
            } else {
                gen_helper_cmpbge(vc, va, vb);
            }
            break;
        case 0x12:
            /* S8ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1B:
            /* S8SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1D:
            /* CMPULT */
            tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
            break;
        case 0x20:
            /* ADDQ */
            tcg_gen_add_i64(vc, va, vb);
            break;
        case 0x22:
            /* S4ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x29:
            /* SUBQ */
            tcg_gen_sub_i64(vc, va, vb);
            break;
        case 0x2B:
            /* S4SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x2D:
            /* CMPEQ */
            tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
            break;
        case 0x32:
            /* S8ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3B:
            /* S8SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3D:
            /* CMPULE */
            tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
            break;
        case 0x40:
            /* ADDL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_add_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x49:
            /* SUBL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_sub_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x4D:
            /* CMPLT */
            tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
            break;
        case 0x60:
            /* ADDQ/V */
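            /* Overflow occurred iff the addends have the same sign (EQV of
               bit 63 set) while the sum's sign differs from VA's.  Bit 63
               of TMP computes that predicate; shift it down to a 0/1 value
               for the helper to compare against zero.  SUBQ/V below is the
               same pattern with XOR, since subtraction can only overflow
               when the operand signs differ.  */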
1669             tmp = tcg_temp_new();
1670             tmp2 = tcg_temp_new();
1671             tcg_gen_eqv_i64(tmp, va, vb);
1672             tcg_gen_mov_i64(tmp2, va);
1673             tcg_gen_add_i64(vc, va, vb);
1674             tcg_gen_xor_i64(tmp2, tmp2, vc);
1675             tcg_gen_and_i64(tmp, tmp, tmp2);
1676             tcg_gen_shri_i64(tmp, tmp, 63);
1677             tcg_gen_movi_i64(tmp2, 0);
1678             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1679             tcg_temp_free(tmp);
1680             tcg_temp_free(tmp2);
1681             break;
1682         case 0x69:
1683             /* SUBQ/V */
1684             tmp = tcg_temp_new();
1685             tmp2 = tcg_temp_new();
1686             tcg_gen_xor_i64(tmp, va, vb);
1687             tcg_gen_mov_i64(tmp2, va);
1688             tcg_gen_sub_i64(vc, va, vb);
1689             tcg_gen_xor_i64(tmp2, tmp2, vc);
1690             tcg_gen_and_i64(tmp, tmp, tmp2);
1691             tcg_gen_shri_i64(tmp, tmp, 63);
1692             tcg_gen_movi_i64(tmp2, 0);
1693             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1694             tcg_temp_free(tmp);
1695             tcg_temp_free(tmp2);
1696             break;
1697         case 0x6D:
1698             /* CMPLE */
1699             tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
1700             break;
1701         default:
1702             goto invalid_opc;
1703         }
1704         break;
1705 
1706     case 0x11:
1707         if (fn7 == 0x20) {
1708             if (rc == 31) {
1709                 /* Special case BIS as NOP.  */
1710                 break;
1711             }
1712             if (ra == 31) {
1713                 /* Special case BIS as MOV.  */
1714                 vc = dest_gpr(ctx, rc);
1715                 if (islit) {
1716                     tcg_gen_movi_i64(vc, lit);
1717                 } else {
1718                     tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
1719                 }
1720                 break;
1721             }
1722         }
1723 
1724         vc = dest_gpr(ctx, rc);
1725         vb = load_gpr_lit(ctx, rb, lit, islit);
1726 
1727         if (fn7 == 0x28 && ra == 31) {
1728             /* Special case ORNOT as NOT.  */
1729             tcg_gen_not_i64(vc, vb);
1730             break;
1731         }
1732 
1733         va = load_gpr(ctx, ra);
1734         switch (fn7) {
1735         case 0x00:
1736             /* AND */
1737             tcg_gen_and_i64(vc, va, vb);
1738             break;
1739         case 0x08:
1740             /* BIC */
1741             tcg_gen_andc_i64(vc, va, vb);
1742             break;
1743         case 0x14:
1744             /* CMOVLBS */
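                 /* CMOV writes rc only when the condition holds, so the
                    current rc value supplies the "else" operand of the
                    movcond here and in the variants below.  */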
1745             tmp = tcg_temp_new();
1746             tcg_gen_andi_i64(tmp, va, 1);
1747             tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
1748                                 vb, load_gpr(ctx, rc));
1749             tcg_temp_free(tmp);
1750             break;
1751         case 0x16:
1752             /* CMOVLBC */
1753             tmp = tcg_temp_new();
1754             tcg_gen_andi_i64(tmp, va, 1);
1755             tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
1756                                 vb, load_gpr(ctx, rc));
1757             tcg_temp_free(tmp);
1758             break;
1759         case 0x20:
1760             /* BIS */
1761             tcg_gen_or_i64(vc, va, vb);
1762             break;
1763         case 0x24:
1764             /* CMOVEQ */
1765             tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
1766                                 vb, load_gpr(ctx, rc));
1767             break;
1768         case 0x26:
1769             /* CMOVNE */
1770             tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
1771                                 vb, load_gpr(ctx, rc));
1772             break;
1773         case 0x28:
1774             /* ORNOT */
1775             tcg_gen_orc_i64(vc, va, vb);
1776             break;
1777         case 0x40:
1778             /* XOR */
1779             tcg_gen_xor_i64(vc, va, vb);
1780             break;
1781         case 0x44:
1782             /* CMOVLT */
1783             tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
1784                                 vb, load_gpr(ctx, rc));
1785             break;
1786         case 0x46:
1787             /* CMOVGE */
1788             tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
1789                                 vb, load_gpr(ctx, rc));
1790             break;
1791         case 0x48:
1792             /* EQV */
1793             tcg_gen_eqv_i64(vc, va, vb);
1794             break;
1795         case 0x61:
1796             /* AMASK */
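                 /* Return vb with the bit for each implemented
                    architecture extension cleared; the CPU's mask is
                    carried in the TB flags.  */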
1797             REQUIRE_REG_31(ra);
1798             {
1799                 uint64_t amask = ctx->tb->flags >> TB_FLAGS_AMASK_SHIFT;
1800                 tcg_gen_andi_i64(vc, vb, ~amask);
1801             }
1802             break;
1803         case 0x64:
1804             /* CMOVLE */
1805             tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
1806                                 vb, load_gpr(ctx, rc));
1807             break;
1808         case 0x66:
1809             /* CMOVGT */
1810             tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
1811                                 vb, load_gpr(ctx, rc));
1812             break;
1813         case 0x6C:
1814             /* IMPLVER */
1815             REQUIRE_REG_31(ra);
1816             tcg_gen_movi_i64(vc, ctx->implver);
1817             break;
1818         default:
1819             goto invalid_opc;
1820         }
1821         break;
1822 
1823     case 0x12:
1824         vc = dest_gpr(ctx, rc);
1825         va = load_gpr(ctx, ra);
1826         switch (fn7) {
1827         case 0x02:
1828             /* MSKBL */
1829             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
1830             break;
1831         case 0x06:
1832             /* EXTBL */
1833             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
1834             break;
1835         case 0x0B:
1836             /* INSBL */
1837             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
1838             break;
1839         case 0x12:
1840             /* MSKWL */
1841             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
1842             break;
1843         case 0x16:
1844             /* EXTWL */
1845             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
1846             break;
1847         case 0x1B:
1848             /* INSWL */
1849             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
1850             break;
1851         case 0x22:
1852             /* MSKLL */
1853             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
1854             break;
1855         case 0x26:
1856             /* EXTLL */
1857             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
1858             break;
1859         case 0x2B:
1860             /* INSLL */
1861             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
1862             break;
1863         case 0x30:
1864             /* ZAP */
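                 /* Clear the bytes of va selected by the low 8 mask bits.
                    With a literal this folds to ZAPNOT of the complemented
                    mask, which gen_zapnoti expands inline.  */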
1865             if (islit) {
1866                 gen_zapnoti(vc, va, ~lit);
1867             } else {
1868                 gen_helper_zap(vc, va, load_gpr(ctx, rb));
1869             }
1870             break;
1871         case 0x31:
1872             /* ZAPNOT */
1873             if (islit) {
1874                 gen_zapnoti(vc, va, lit);
1875             } else {
1876                 gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
1877             }
1878             break;
1879         case 0x32:
1880             /* MSKQL */
1881             gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
1882             break;
1883         case 0x34:
1884             /* SRL */
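                 /* Alpha shifts use only the low 6 bits of rb as the
                    count, hence the 0x3f masking here and in SLL/SRA.  */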
1885             if (islit) {
1886                 tcg_gen_shri_i64(vc, va, lit & 0x3f);
1887             } else {
1888                 tmp = tcg_temp_new();
1889                 vb = load_gpr(ctx, rb);
1890                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1891                 tcg_gen_shr_i64(vc, va, tmp);
1892                 tcg_temp_free(tmp);
1893             }
1894             break;
1895         case 0x36:
1896             /* EXTQL */
1897             gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
1898             break;
1899         case 0x39:
1900             /* SLL */
1901             if (islit) {
1902                 tcg_gen_shli_i64(vc, va, lit & 0x3f);
1903             } else {
1904                 tmp = tcg_temp_new();
1905                 vb = load_gpr(ctx, rb);
1906                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1907                 tcg_gen_shl_i64(vc, va, tmp);
1908                 tcg_temp_free(tmp);
1909             }
1910             break;
1911         case 0x3B:
1912             /* INSQL */
1913             gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
1914             break;
1915         case 0x3C:
1916             /* SRA */
1917             if (islit) {
1918                 tcg_gen_sari_i64(vc, va, lit & 0x3f);
1919             } else {
1920                 tmp = tcg_temp_new();
1921                 vb = load_gpr(ctx, rb);
1922                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1923                 tcg_gen_sar_i64(vc, va, tmp);
1924                 tcg_temp_free(tmp);
1925             }
1926             break;
1927         case 0x52:
1928             /* MSKWH */
1929             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
1930             break;
1931         case 0x57:
1932             /* INSWH */
1933             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
1934             break;
1935         case 0x5A:
1936             /* EXTWH */
1937             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
1938             break;
1939         case 0x62:
1940             /* MSKLH */
1941             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
1942             break;
1943         case 0x67:
1944             /* INSLH */
1945             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
1946             break;
1947         case 0x6A:
1948             /* EXTLH */
1949             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
1950             break;
1951         case 0x72:
1952             /* MSKQH */
1953             gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
1954             break;
1955         case 0x77:
1956             /* INSQH */
1957             gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
1958             break;
1959         case 0x7A:
1960             /* EXTQH */
1961             gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
1962             break;
1963         default:
1964             goto invalid_opc;
1965         }
1966         break;
1967 
1968     case 0x13:
1969         vc = dest_gpr(ctx, rc);
1970         vb = load_gpr_lit(ctx, rb, lit, islit);
1971         va = load_gpr(ctx, ra);
1972         switch (fn7) {
1973         case 0x00:
1974             /* MULL */
1975             tcg_gen_mul_i64(vc, va, vb);
1976             tcg_gen_ext32s_i64(vc, vc);
1977             break;
1978         case 0x20:
1979             /* MULQ */
1980             tcg_gen_mul_i64(vc, va, vb);
1981             break;
1982         case 0x30:
1983             /* UMULH */
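                 /* mulu2 produces the full 128-bit product; the low half
                    lands in tmp and is discarded, leaving the high 64
                    bits in vc.  */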
1984             tmp = tcg_temp_new();
1985             tcg_gen_mulu2_i64(tmp, vc, va, vb);
1986             tcg_temp_free(tmp);
1987             break;
1988         case 0x40:
1989             /* MULL/V */
1990             tmp = tcg_temp_new();
1991             tcg_gen_ext32s_i64(tmp, va);
1992             tcg_gen_ext32s_i64(vc, vb);
1993             tcg_gen_mul_i64(tmp, tmp, vc);
1994             tcg_gen_ext32s_i64(vc, tmp);
1995             gen_helper_check_overflow(cpu_env, vc, tmp);
1996             tcg_temp_free(tmp);
1997             break;
1998         case 0x60:
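                 /* muls2 puts the low half of the 128-bit signed product
                    in vc and the high half in tmp.  There is no overflow
                    iff the high half equals the sign extension of the
                    low half, which check_overflow verifies.  */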
1999             /* MULQ/V */
2000             tmp = tcg_temp_new();
2001             tmp2 = tcg_temp_new();
2002             tcg_gen_muls2_i64(vc, tmp, va, vb);
2003             tcg_gen_sari_i64(tmp2, vc, 63);
2004             gen_helper_check_overflow(cpu_env, tmp, tmp2);
2005             tcg_temp_free(tmp);
2006             tcg_temp_free(tmp2);
2007             break;
2008         default:
2009             goto invalid_opc;
2010         }
2011         break;
2012 
2013     case 0x14:
2014         REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2015         vc = dest_fpr(ctx, rc);
2016         switch (fpfn) { /* fn11 & 0x3F */
2017         case 0x04:
2018             /* ITOFS */
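                 /* The ITOFx insns (here and below) move integer register
                    bit patterns into the FP register file unconverted.
                    S and F values occupy an expanded 64-bit layout in the
                    register, so helpers do the repacking; ITOFT is a
                    plain move.  */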
2019             REQUIRE_REG_31(rb);
2020             t32 = tcg_temp_new_i32();
2021             va = load_gpr(ctx, ra);
2022             tcg_gen_extrl_i64_i32(t32, va);
2023             gen_helper_memory_to_s(vc, t32);
2024             tcg_temp_free_i32(t32);
2025             break;
2026         case 0x0A:
2027             /* SQRTF */
2028             REQUIRE_REG_31(ra);
2029             vb = load_fpr(ctx, rb);
2030             gen_helper_sqrtf(vc, cpu_env, vb);
2031             break;
2032         case 0x0B:
2033             /* SQRTS */
2034             REQUIRE_REG_31(ra);
2035             gen_sqrts(ctx, rb, rc, fn11);
2036             break;
2037         case 0x14:
2038             /* ITOFF */
2039             REQUIRE_REG_31(rb);
2040             t32 = tcg_temp_new_i32();
2041             va = load_gpr(ctx, ra);
2042             tcg_gen_extrl_i64_i32(t32, va);
2043             gen_helper_memory_to_f(vc, t32);
2044             tcg_temp_free_i32(t32);
2045             break;
2046         case 0x24:
2047             /* ITOFT */
2048             REQUIRE_REG_31(rb);
2049             va = load_gpr(ctx, ra);
2050             tcg_gen_mov_i64(vc, va);
2051             break;
2052         case 0x2A:
2053             /* SQRTG */
2054             REQUIRE_REG_31(ra);
2055             vb = load_fpr(ctx, rb);
2056             gen_helper_sqrtg(vc, cpu_env, vb);
2057             break;
2058         case 0x2B:
2059             /* SQRTT */
2060             REQUIRE_REG_31(ra);
2061             gen_sqrtt(ctx, rb, rc, fn11);
2062             break;
2063         default:
2064             goto invalid_opc;
2065         }
2066         break;
2067 
2068     case 0x15:
2069         /* VAX floating point */
2070         /* XXX: rounding mode and trap are ignored (!) */
2071         vc = dest_fpr(ctx, rc);
2072         vb = load_fpr(ctx, rb);
2073         va = load_fpr(ctx, ra);
2074         switch (fpfn) { /* fn11 & 0x3F */
2075         case 0x00:
2076             /* ADDF */
2077             gen_helper_addf(vc, cpu_env, va, vb);
2078             break;
2079         case 0x01:
2080             /* SUBF */
2081             gen_helper_subf(vc, cpu_env, va, vb);
2082             break;
2083         case 0x02:
2084             /* MULF */
2085             gen_helper_mulf(vc, cpu_env, va, vb);
2086             break;
2087         case 0x03:
2088             /* DIVF */
2089             gen_helper_divf(vc, cpu_env, va, vb);
2090             break;
2091         case 0x1E:
2092             /* CVTDG -- TODO */
2093             REQUIRE_REG_31(ra);
2094             goto invalid_opc;
2095         case 0x20:
2096             /* ADDG */
2097             gen_helper_addg(vc, cpu_env, va, vb);
2098             break;
2099         case 0x21:
2100             /* SUBG */
2101             gen_helper_subg(vc, cpu_env, va, vb);
2102             break;
2103         case 0x22:
2104             /* MULG */
2105             gen_helper_mulg(vc, cpu_env, va, vb);
2106             break;
2107         case 0x23:
2108             /* DIVG */
2109             gen_helper_divg(vc, cpu_env, va, vb);
2110             break;
2111         case 0x25:
2112             /* CMPGEQ */
2113             gen_helper_cmpgeq(vc, cpu_env, va, vb);
2114             break;
2115         case 0x26:
2116             /* CMPGLT */
2117             gen_helper_cmpglt(vc, cpu_env, va, vb);
2118             break;
2119         case 0x27:
2120             /* CMPGLE */
2121             gen_helper_cmpgle(vc, cpu_env, va, vb);
2122             break;
2123         case 0x2C:
2124             /* CVTGF */
2125             REQUIRE_REG_31(ra);
2126             gen_helper_cvtgf(vc, cpu_env, vb);
2127             break;
2128         case 0x2D:
2129             /* CVTGD -- TODO */
2130             REQUIRE_REG_31(ra);
2131             goto invalid_opc;
2132         case 0x2F:
2133             /* CVTGQ */
2134             REQUIRE_REG_31(ra);
2135             gen_helper_cvtgq(vc, cpu_env, vb);
2136             break;
2137         case 0x3C:
2138             /* CVTQF */
2139             REQUIRE_REG_31(ra);
2140             gen_helper_cvtqf(vc, cpu_env, vb);
2141             break;
2142         case 0x3E:
2143             /* CVTQG */
2144             REQUIRE_REG_31(ra);
2145             gen_helper_cvtqg(vc, cpu_env, vb);
2146             break;
2147         default:
2148             goto invalid_opc;
2149         }
2150         break;
2151 
2152     case 0x16:
2153         /* IEEE floating-point */
2154         switch (fpfn) { /* fn11 & 0x3F */
2155         case 0x00:
2156             /* ADDS */
2157             gen_adds(ctx, ra, rb, rc, fn11);
2158             break;
2159         case 0x01:
2160             /* SUBS */
2161             gen_subs(ctx, ra, rb, rc, fn11);
2162             break;
2163         case 0x02:
2164             /* MULS */
2165             gen_muls(ctx, ra, rb, rc, fn11);
2166             break;
2167         case 0x03:
2168             /* DIVS */
2169             gen_divs(ctx, ra, rb, rc, fn11);
2170             break;
2171         case 0x20:
2172             /* ADDT */
2173             gen_addt(ctx, ra, rb, rc, fn11);
2174             break;
2175         case 0x21:
2176             /* SUBT */
2177             gen_subt(ctx, ra, rb, rc, fn11);
2178             break;
2179         case 0x22:
2180             /* MULT */
2181             gen_mult(ctx, ra, rb, rc, fn11);
2182             break;
2183         case 0x23:
2184             /* DIVT */
2185             gen_divt(ctx, ra, rb, rc, fn11);
2186             break;
2187         case 0x24:
2188             /* CMPTUN */
2189             gen_cmptun(ctx, ra, rb, rc, fn11);
2190             break;
2191         case 0x25:
2192             /* CMPTEQ */
2193             gen_cmpteq(ctx, ra, rb, rc, fn11);
2194             break;
2195         case 0x26:
2196             /* CMPTLT */
2197             gen_cmptlt(ctx, ra, rb, rc, fn11);
2198             break;
2199         case 0x27:
2200             /* CMPTLE */
2201             gen_cmptle(ctx, ra, rb, rc, fn11);
2202             break;
2203         case 0x2C:
2204             REQUIRE_REG_31(ra);
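                 /* Function values 0x2AC and 0x6AC encode CVTST (the
                    latter with the /S qualifier); the other qualifier
                    combinations of this function code are CVTTS.  */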
2205             if (fn11 == 0x2AC || fn11 == 0x6AC) {
2206                 /* CVTST */
2207                 gen_cvtst(ctx, rb, rc, fn11);
2208             } else {
2209                 /* CVTTS */
2210                 gen_cvtts(ctx, rb, rc, fn11);
2211             }
2212             break;
2213         case 0x2F:
2214             /* CVTTQ */
2215             REQUIRE_REG_31(ra);
2216             gen_cvttq(ctx, rb, rc, fn11);
2217             break;
2218         case 0x3C:
2219             /* CVTQS */
2220             REQUIRE_REG_31(ra);
2221             gen_cvtqs(ctx, rb, rc, fn11);
2222             break;
2223         case 0x3E:
2224             /* CVTQT */
2225             REQUIRE_REG_31(ra);
2226             gen_cvtqt(ctx, rb, rc, fn11);
2227             break;
2228         default:
2229             goto invalid_opc;
2230         }
2231         break;
2232 
2233     case 0x17:
2234         switch (fn11) {
2235         case 0x010:
2236             /* CVTLQ */
2237             REQUIRE_REG_31(ra);
2238             vc = dest_fpr(ctx, rc);
2239             vb = load_fpr(ctx, rb);
2240             gen_cvtlq(vc, vb);
2241             break;
2242         case 0x020:
2243             /* CPYS */
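                 /* gen_cpy_mask combines va's bits under the mask
                    (optionally negated) with vb's remaining bits: CPYS
                    and CPYSN operate on the sign bit alone, CPYSE on the
                    sign and exponent field.  */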
2244             if (rc == 31) {
2245                 /* Special case CPYS as FNOP.  */
2246             } else {
2247                 vc = dest_fpr(ctx, rc);
2248                 va = load_fpr(ctx, ra);
2249                 if (ra == rb) {
2250                     /* Special case CPYS as FMOV.  */
2251                     tcg_gen_mov_i64(vc, va);
2252                 } else {
2253                     vb = load_fpr(ctx, rb);
2254                     gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
2255                 }
2256             }
2257             break;
2258         case 0x021:
2259             /* CPYSN */
2260             vc = dest_fpr(ctx, rc);
2261             vb = load_fpr(ctx, rb);
2262             va = load_fpr(ctx, ra);
2263             gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
2264             break;
2265         case 0x022:
2266             /* CPYSE */
2267             vc = dest_fpr(ctx, rc);
2268             vb = load_fpr(ctx, rb);
2269             va = load_fpr(ctx, ra);
2270             gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
2271             break;
2272         case 0x024:
2273             /* MT_FPCR */
2274             va = load_fpr(ctx, ra);
2275             gen_helper_store_fpcr(cpu_env, va);
2276             if (ctx->tb_rm == QUAL_RM_D) {
2277                 /* Re-do the copy of the rounding mode to fp_status
2278                    the next time we use dynamic rounding.  */
2279                 ctx->tb_rm = -1;
2280             }
2281             break;
2282         case 0x025:
2283             /* MF_FPCR */
2284             va = dest_fpr(ctx, ra);
2285             gen_helper_load_fpcr(va, cpu_env);
2286             break;
2287         case 0x02A:
2288             /* FCMOVEQ */
2289             gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
2290             break;
2291         case 0x02B:
2292             /* FCMOVNE */
2293             gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
2294             break;
2295         case 0x02C:
2296             /* FCMOVLT */
2297             gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
2298             break;
2299         case 0x02D:
2300             /* FCMOVGE */
2301             gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
2302             break;
2303         case 0x02E:
2304             /* FCMOVLE */
2305             gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
2306             break;
2307         case 0x02F:
2308             /* FCMOVGT */
2309             gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
2310             break;
2311         case 0x030: /* CVTQL */
2312         case 0x130: /* CVTQL/V */
2313         case 0x530: /* CVTQL/SV */
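                 /* All three forms share one conversion; the /V and /SV
                    qualifiers differ only in which exceptions are
                    reported, which gen_fp_exc_raise derives from fn11.  */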
2314             REQUIRE_REG_31(ra);
2315             vc = dest_fpr(ctx, rc);
2316             vb = load_fpr(ctx, rb);
2317             gen_helper_cvtql(vc, cpu_env, vb);
2318             gen_fp_exc_raise(rc, fn11);
2319             break;
2320         default:
2321             goto invalid_opc;
2322         }
2323         break;
2324 
2325     case 0x18:
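             /* "Miscellaneous" instruction group: the function code is
                carried in the displacement field, so disp16 is compared
                as an unsigned 16-bit value.  */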
2326         switch ((uint16_t)disp16) {
2327         case 0x0000:
2328             /* TRAPB */
2329             /* No-op.  */
2330             break;
2331         case 0x0400:
2332             /* EXCB */
2333             /* No-op.  */
2334             break;
2335         case 0x4000:
2336             /* MB */
2337             tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2338             break;
2339         case 0x4400:
2340             /* WMB */
2341             tcg_gen_mb(TCG_MO_ST_ST | TCG_BAR_SC);
2342             break;
2343         case 0x8000:
2344             /* FETCH */
2345             /* No-op */
2346             break;
2347         case 0xA000:
2348             /* FETCH_M */
2349             /* No-op */
2350             break;
2351         case 0xC000:
2352             /* RPCC */
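                 /* Under icount, reading the cycle counter is an I/O
                    operation: bracket the helper with gen_io_start/end
                    and end the TB so the instruction count stays exact.  */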
2353             va = dest_gpr(ctx, ra);
2354             if (ctx->tb->cflags & CF_USE_ICOUNT) {
2355                 gen_io_start();
2356                 gen_helper_load_pcc(va, cpu_env);
2357                 gen_io_end();
2358                 ret = EXIT_PC_STALE;
2359             } else {
2360                 gen_helper_load_pcc(va, cpu_env);
2361             }
2362             break;
2363         case 0xE000:
2364             /* RC */
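                 /* RC and RS (below) copy the sticky intr_flag into ra
                    and then clear or set it; the final argument to
                    gen_rx is the new flag value.  */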
2365             gen_rx(ctx, ra, 0);
2366             break;
2367         case 0xE800:
2368             /* ECB */
2369             break;
2370         case 0xF000:
2371             /* RS */
2372             gen_rx(ctx, ra, 1);
2373             break;
2374         case 0xF800:
2375             /* WH64 */
2376             /* No-op */
2377             break;
2378         case 0xFC00:
2379             /* WH64EN */
2380             /* No-op */
2381             break;
2382         default:
2383             goto invalid_opc;
2384         }
2385         break;
2386 
2387     case 0x19:
2388         /* HW_MFPR (PALcode) */
2389 #ifndef CONFIG_USER_ONLY
2390         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2391         va = dest_gpr(ctx, ra);
2392         ret = gen_mfpr(ctx, va, insn & 0xffff);
2393         break;
2394 #else
2395         goto invalid_opc;
2396 #endif
2397 
2398     case 0x1A:
2399         /* JMP, JSR, RET, JSR_COROUTINE.  These differ only in the branch
2400            prediction stack action, which of course we don't implement.  */
2401         vb = load_gpr(ctx, rb);
2402         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2403         if (ra != 31) {
2404             tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
2405         }
2406         ret = EXIT_PC_UPDATED;
2407         break;
2408 
2409     case 0x1B:
2410         /* HW_LD (PALcode) */
2411 #ifndef CONFIG_USER_ONLY
2412         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2413         {
2414             TCGv addr = tcg_temp_new();
2415             vb = load_gpr(ctx, rb);
2416             va = dest_gpr(ctx, ra);
2417 
2418             tcg_gen_addi_i64(addr, vb, disp12);
2419             switch ((insn >> 12) & 0xF) {
2420             case 0x0:
2421                 /* Longword physical access (hw_ldl/p) */
2422                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
2423                 break;
2424             case 0x1:
2425                 /* Quadword physical access (hw_ldq/p) */
2426                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEQ);
2427                 break;
2428             case 0x2:
2429                 /* Longword physical access with lock (hw_ldl_l/p) */
2430                 gen_qemu_ldl_l(va, addr, MMU_PHYS_IDX);
2431                 break;
2432             case 0x3:
2433                 /* Quadword physical access with lock (hw_ldq_l/p) */
2434                 gen_qemu_ldq_l(va, addr, MMU_PHYS_IDX);
2435                 break;
2436             case 0x4:
2437                 /* Longword virtual PTE fetch (hw_ldl/v) */
2438                 goto invalid_opc;
2439             case 0x5:
2440                 /* Quadword virtual PTE fetch (hw_ldq/v) */
2441                 goto invalid_opc;
2443             case 0x6:
2444                 /* Invalid */
2445                 goto invalid_opc;
2446             case 0x7:
2447                 /* Invalid */
2448                 goto invalid_opc;
2449             case 0x8:
2450                 /* Longword virtual access (hw_ldl) */
2451                 goto invalid_opc;
2452             case 0x9:
2453                 /* Quadword virtual access (hw_ldq) */
2454                 goto invalid_opc;
2455             case 0xA:
2456                 /* Longword virtual access with protection check (hw_ldl/w) */
2457                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
2458                 break;
2459             case 0xB:
2460                 /* Quadword virtual access with protection check (hw_ldq/w) */
2461                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
2462                 break;
2463             case 0xC:
2464                 /* Longword virtual access with alt access mode (hw_ldl/a)*/
2465                 goto invalid_opc;
2466             case 0xD:
2467                 /* Quadword virtual access with alt access mode (hw_ldq/a) */
2468                 goto invalid_opc;
2469             case 0xE:
2470                 /* Longword virtual access with alternate access mode and
2471                    protection checks (hw_ldl/wa) */
2472                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
2473                 break;
2474             case 0xF:
2475                 /* Quadword virtual access with alternate access mode and
2476                    protection checks (hw_ldq/wa) */
2477                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
2478                 break;
2479             }
2480             tcg_temp_free(addr);
2481             break;
2482         }
2483 #else
2484         goto invalid_opc;
2485 #endif
2486 
2487     case 0x1C:
2488         vc = dest_gpr(ctx, rc);
2489         if (fn7 == 0x70) {
2490             /* FTOIT */
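             /* FTOIT is a raw 64-bit register-to-register move; FTOIS
                (below) must first repack the register's S-format layout
                into the 32-bit memory format before sign-extending.  */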
2491             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2492             REQUIRE_REG_31(rb);
2493             va = load_fpr(ctx, ra);
2494             tcg_gen_mov_i64(vc, va);
2495             break;
2496         } else if (fn7 == 0x78) {
2497             /* FTOIS */
2498             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2499             REQUIRE_REG_31(rb);
2500             t32 = tcg_temp_new_i32();
2501             va = load_fpr(ctx, ra);
2502             gen_helper_s_to_memory(t32, va);
2503             tcg_gen_ext_i32_i64(vc, t32);
2504             tcg_temp_free_i32(t32);
2505             break;
2506         }
2507 
2508         vb = load_gpr_lit(ctx, rb, lit, islit);
2509         switch (fn7) {
2510         case 0x00:
2511             /* SEXTB */
2512             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
2513             REQUIRE_REG_31(ra);
2514             tcg_gen_ext8s_i64(vc, vb);
2515             break;
2516         case 0x01:
2517             /* SEXTW */
2518             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
2519             REQUIRE_REG_31(ra);
2520             tcg_gen_ext16s_i64(vc, vb);
2521             break;
2522         case 0x30:
2523             /* CTPOP */
2524             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2525             REQUIRE_REG_31(ra);
2526             REQUIRE_NO_LIT;
2527             gen_helper_ctpop(vc, vb);
2528             break;
2529         case 0x31:
2530             /* PERR */
2531             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2532             REQUIRE_NO_LIT;
2533             va = load_gpr(ctx, ra);
2534             gen_helper_perr(vc, va, vb);
2535             break;
2536         case 0x32:
2537             /* CTLZ */
2538             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2539             REQUIRE_REG_31(ra);
2540             REQUIRE_NO_LIT;
2541             gen_helper_ctlz(vc, vb);
2542             break;
2543         case 0x33:
2544             /* CTTZ */
2545             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2546             REQUIRE_REG_31(ra);
2547             REQUIRE_NO_LIT;
2548             gen_helper_cttz(vc, vb);
2549             break;
2550         case 0x34:
2551             /* UNPKBW */
2552             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2553             REQUIRE_REG_31(ra);
2554             REQUIRE_NO_LIT;
2555             gen_helper_unpkbw(vc, vb);
2556             break;
2557         case 0x35:
2558             /* UNPKBL */
2559             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2560             REQUIRE_REG_31(ra);
2561             REQUIRE_NO_LIT;
2562             gen_helper_unpkbl(vc, vb);
2563             break;
2564         case 0x36:
2565             /* PKWB */
2566             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2567             REQUIRE_REG_31(ra);
2568             REQUIRE_NO_LIT;
2569             gen_helper_pkwb(vc, vb);
2570             break;
2571         case 0x37:
2572             /* PKLB */
2573             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2574             REQUIRE_REG_31(ra);
2575             REQUIRE_NO_LIT;
2576             gen_helper_pklb(vc, vb);
2577             break;
2578         case 0x38:
2579             /* MINSB8 */
2580             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2581             va = load_gpr(ctx, ra);
2582             gen_helper_minsb8(vc, va, vb);
2583             break;
2584         case 0x39:
2585             /* MINSW4 */
2586             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2587             va = load_gpr(ctx, ra);
2588             gen_helper_minsw4(vc, va, vb);
2589             break;
2590         case 0x3A:
2591             /* MINUB8 */
2592             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2593             va = load_gpr(ctx, ra);
2594             gen_helper_minub8(vc, va, vb);
2595             break;
2596         case 0x3B:
2597             /* MINUW4 */
2598             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2599             va = load_gpr(ctx, ra);
2600             gen_helper_minuw4(vc, va, vb);
2601             break;
2602         case 0x3C:
2603             /* MAXUB8 */
2604             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2605             va = load_gpr(ctx, ra);
2606             gen_helper_maxub8(vc, va, vb);
2607             break;
2608         case 0x3D:
2609             /* MAXUW4 */
2610             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2611             va = load_gpr(ctx, ra);
2612             gen_helper_maxuw4(vc, va, vb);
2613             break;
2614         case 0x3E:
2615             /* MAXSB8 */
2616             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2617             va = load_gpr(ctx, ra);
2618             gen_helper_maxsb8(vc, va, vb);
2619             break;
2620         case 0x3F:
2621             /* MAXSW4 */
2622             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2623             va = load_gpr(ctx, ra);
2624             gen_helper_maxsw4(vc, va, vb);
2625             break;
2626         default:
2627             goto invalid_opc;
2628         }
2629         break;
2630 
2631     case 0x1D:
2632         /* HW_MTPR (PALcode) */
2633 #ifndef CONFIG_USER_ONLY
2634         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2635         vb = load_gpr(ctx, rb);
2636         ret = gen_mtpr(ctx, vb, insn & 0xffff);
2637         break;
2638 #else
2639         goto invalid_opc;
2640 #endif
2641 
2642     case 0x1E:
2643         /* HW_RET (PALcode) */
2644 #ifndef CONFIG_USER_ONLY
2645         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2646         if (rb == 31) {
2647             /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
2648                address from EXC_ADDR.  This turns out to be useful for our
2649                emulation PALcode, so continue to accept it.  */
2650             ctx->lit = vb = tcg_temp_new();
2651             tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
2652         } else {
2653             vb = load_gpr(ctx, rb);
2654         }
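         /* HW_RET clears the interrupt flag, drops any lock held by
            LDx_L, takes the new PAL mode from bit 0 of the target, and
            masks the low bits off the new PC.  */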
2655         tmp = tcg_temp_new();
2656         tcg_gen_movi_i64(tmp, 0);
2657         tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
2658         tcg_gen_movi_i64(cpu_lock_addr, -1);
2659         tcg_gen_andi_i64(tmp, vb, 1);
2660         tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
2661         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2662         ret = EXIT_PC_UPDATED;
2663         break;
2664 #else
2665         goto invalid_opc;
2666 #endif
2667 
2668     case 0x1F:
2669         /* HW_ST (PALcode) */
2670 #ifndef CONFIG_USER_ONLY
2671         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2672         {
2673             switch ((insn >> 12) & 0xF) {
2674             case 0x0:
2675                 /* Longword physical access */
2676                 va = load_gpr(ctx, ra);
2677                 vb = load_gpr(ctx, rb);
2678                 tmp = tcg_temp_new();
2679                 tcg_gen_addi_i64(tmp, vb, disp12);
2680                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
2681                 tcg_temp_free(tmp);
2682                 break;
2683             case 0x1:
2684                 /* Quadword physical access */
2685                 va = load_gpr(ctx, ra);
2686                 vb = load_gpr(ctx, rb);
2687                 tmp = tcg_temp_new();
2688                 tcg_gen_addi_i64(tmp, vb, disp12);
2689                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEQ);
2690                 tcg_temp_free(tmp);
2691                 break;
2692             case 0x2:
2693                 /* Longword physical access with lock */
2694                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2695                                             MMU_PHYS_IDX, MO_LESL);
2696                 break;
2697             case 0x3:
2698                 /* Quadword physical access with lock */
2699                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2700                                             MMU_PHYS_IDX, MO_LEQ);
2701                 break;
2702             case 0x4:
2703                 /* Longword virtual access */
2704                 goto invalid_opc;
2705             case 0x5:
2706                 /* Quadword virtual access */
2707                 goto invalid_opc;
2708             case 0x6:
2709                 /* Invalid */
2710                 goto invalid_opc;
2711             case 0x7:
2712                 /* Invalid */
2713                 goto invalid_opc;
2714             case 0x8:
2715                 /* Invalid */
2716                 goto invalid_opc;
2717             case 0x9:
2718                 /* Invalid */
2719                 goto invalid_opc;
2720             case 0xA:
2721                 /* Invalid */
2722                 goto invalid_opc;
2723             case 0xB:
2724                 /* Invalid */
2725                 goto invalid_opc;
2726             case 0xC:
2727                 /* Longword virtual access with alternate access mode */
2728                 goto invalid_opc;
2729             case 0xD:
2730                 /* Quadword virtual access with alternate access mode */
2731                 goto invalid_opc;
2732             case 0xE:
2733                 /* Invalid */
2734                 goto invalid_opc;
2735             case 0xF:
2736                 /* Invalid */
2737                 goto invalid_opc;
2738             }
2739             break;
2740         }
2741 #else
2742         goto invalid_opc;
2743 #endif
2744     case 0x20:
2745         /* LDF */
2746         gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2747         break;
2748     case 0x21:
2749         /* LDG */
2750         gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2751         break;
2752     case 0x22:
2753         /* LDS */
2754         gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2755         break;
2756     case 0x23:
2757         /* LDT */
2758         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2759         break;
2760     case 0x24:
2761         /* STF */
2762         gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
2763         break;
2764     case 0x25:
2765         /* STG */
2766         gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
2767         break;
2768     case 0x26:
2769         /* STS */
2770         gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
2771         break;
2772     case 0x27:
2773         /* STT */
2774         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
2775         break;
2776     case 0x28:
2777         /* LDL */
2778         gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2779         break;
2780     case 0x29:
2781         /* LDQ */
2782         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2783         break;
2784     case 0x2A:
2785         /* LDL_L */
2786         gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2787         break;
2788     case 0x2B:
2789         /* LDQ_L */
2790         gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2791         break;
2792     case 0x2C:
2793         /* STL */
2794         gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
2795         break;
2796     case 0x2D:
2797         /* STQ */
2798         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
2799         break;
2800     case 0x2E:
2801         /* STL_C */
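         /* Store-conditionals succeed only if the lock established by a
            prior LDx_L is still intact; gen_store_conditional writes the
            0/1 outcome back to ra.  */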
2802         ret = gen_store_conditional(ctx, ra, rb, disp16,
2803                                     ctx->mem_idx, MO_LESL);
2804         break;
2805     case 0x2F:
2806         /* STQ_C */
2807         ret = gen_store_conditional(ctx, ra, rb, disp16,
2808                                     ctx->mem_idx, MO_LEQ);
2809         break;
2810     case 0x30:
2811         /* BR */
2812         ret = gen_bdirect(ctx, ra, disp21);
2813         break;
2814     case 0x31: /* FBEQ */
2815         ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
2816         break;
2817     case 0x32: /* FBLT */
2818         ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
2819         break;
2820     case 0x33: /* FBLE */
2821         ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
2822         break;
2823     case 0x34:
2824         /* BSR */
2825         ret = gen_bdirect(ctx, ra, disp21);
2826         break;
2827     case 0x35: /* FBNE */
2828         ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
2829         break;
2830     case 0x36: /* FBGE */
2831         ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
2832         break;
2833     case 0x37: /* FBGT */
2834         ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
2835         break;
2836     case 0x38:
2837         /* BLBC */
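         /* BLBC/BLBS test only the low bit of ra; the final argument to
            gen_bcond requests that masking.  */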
2838         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2839         break;
2840     case 0x39:
2841         /* BEQ */
2842         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2843         break;
2844     case 0x3A:
2845         /* BLT */
2846         ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2847         break;
2848     case 0x3B:
2849         /* BLE */
2850         ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2851         break;
2852     case 0x3C:
2853         /* BLBS */
2854         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2855         break;
2856     case 0x3D:
2857         /* BNE */
2858         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2859         break;
2860     case 0x3E:
2861         /* BGE */
2862         ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2863         break;
2864     case 0x3F:
2865         /* BGT */
2866         ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2867         break;
2868     invalid_opc:
2869         ret = gen_invalid(ctx);
2870         break;
2871     }
2872 
2873     return ret;
2874 }
2875 
2876 void gen_intermediate_code(CPUAlphaState *env, struct TranslationBlock *tb)
2877 {
2878     AlphaCPU *cpu = alpha_env_get_cpu(env);
2879     CPUState *cs = CPU(cpu);
2880     DisasContext ctx, *ctxp = &ctx;
2881     target_ulong pc_start;
2882     target_ulong pc_mask;
2883     uint32_t insn;
2884     ExitStatus ret;
2885     int num_insns;
2886     int max_insns;
2887 
2888     pc_start = tb->pc;
2889 
2890     ctx.tb = tb;
2891     ctx.pc = pc_start;
2892     ctx.mem_idx = cpu_mmu_index(env, false);
2893     ctx.implver = env->implver;
2894     ctx.singlestep_enabled = cs->singlestep_enabled;
2895 
2896 #ifdef CONFIG_USER_ONLY
2897     ctx.ir = cpu_std_ir;
2898 #else
2899     ctx.palbr = env->palbr;
2900     ctx.ir = (tb->flags & TB_FLAGS_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
2901 #endif
2902 
2903     /* ??? Every TB begins with an unset rounding mode, to be initialized
2904        on the first fp insn of the TB.  Alternatively we could define a
2905        proper default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make
2906        sure to reset the FP_STATUS to that default at the end of any TB
2907        that changes the default.  We could even (gasp) dynamically figure
2908        out what default would be most efficient given the running program.  */
2909     ctx.tb_rm = -1;
2910     /* Similarly for flush-to-zero.  */
2911     ctx.tb_ftz = -1;
2912 
2913     TCGV_UNUSED_I64(ctx.zero);
2914     TCGV_UNUSED_I64(ctx.sink);
2915     TCGV_UNUSED_I64(ctx.lit);
2916 
2917     num_insns = 0;
2918     max_insns = tb->cflags & CF_COUNT_MASK;
2919     if (max_insns == 0) {
2920         max_insns = CF_COUNT_MASK;
2921     }
2922     if (max_insns > TCG_MAX_INSNS) {
2923         max_insns = TCG_MAX_INSNS;
2924     }
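     /* Translation stops at a page boundary.  Superpage mappings are
        contiguous over 2^41 bytes, so a wider mask applies within one.  */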
2925 
2926     if (in_superpage(&ctx, pc_start)) {
2927         pc_mask = (1ULL << 41) - 1;
2928     } else {
2929         pc_mask = ~TARGET_PAGE_MASK;
2930     }
2931 
2932     gen_tb_start(tb);
2933     do {
2934         tcg_gen_insn_start(ctx.pc);
2935         num_insns++;
2936 
2937         if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
2938             ret = gen_excp(&ctx, EXCP_DEBUG, 0);
2939             /* The address covered by the breakpoint must be included in
2940                [tb->pc, tb->pc + tb->size) in order for it to be
2941                properly cleared -- thus we increment the PC here so that
2942                the logic setting tb->size below does the right thing.  */
2943             ctx.pc += 4;
2944             break;
2945         }
2946         if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
2947             gen_io_start();
2948         }
2949         insn = cpu_ldl_code(env, ctx.pc);
2950 
2951         ctx.pc += 4;
2952         ret = translate_one(ctxp, insn);
2953         free_context_temps(ctxp);
2954 
2955         /* Stop generation when we reach a page boundary, are single
2956            stepping, exhaust the instruction count, or fill the op buffer.  */
2957         if (ret == NO_EXIT
2958             && ((ctx.pc & pc_mask) == 0
2959                 || tcg_op_buf_full()
2960                 || num_insns >= max_insns
2961                 || singlestep
2962                 || ctx.singlestep_enabled)) {
2963             ret = EXIT_PC_STALE;
2964         }
2965     } while (ret == NO_EXIT);
2966 
2967     if (tb->cflags & CF_LAST_IO) {
2968         gen_io_end();
2969     }
2970 
2971     switch (ret) {
2972     case EXIT_GOTO_TB:
2973     case EXIT_NORETURN:
2974         break;
2975     case EXIT_PC_STALE:
2976         tcg_gen_movi_i64(cpu_pc, ctx.pc);
2977         /* FALLTHRU */
2978     case EXIT_PC_UPDATED:
2979         if (ctx.singlestep_enabled) {
2980             gen_excp_1(EXCP_DEBUG, 0);
2981         } else {
2982             tcg_gen_exit_tb(0);
2983         }
2984         break;
2985     default:
2986         abort();
2987     }
2988 
2989     gen_tb_end(tb, num_insns);
2990 
2991     tb->size = ctx.pc - pc_start;
2992     tb->icount = num_insns;
2993 
2994 #ifdef DEBUG_DISAS
2995     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
2996         && qemu_log_in_addr_range(pc_start)) {
2997         qemu_log_lock();
2998         qemu_log("IN: %s\n", lookup_symbol(pc_start));
2999         log_target_disas(cs, pc_start, ctx.pc - pc_start, 1);
3000         qemu_log("\n");
3001         qemu_log_unlock();
3002     }
3003 #endif
3004 }
3005 
3006 void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
3007                           target_ulong *data)
3008 {
3009     env->pc = data[0];
3010 }
3011