/*
 *  Alpha emulation cpu translation for qemu.
 *
 *  Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "sysemu/cpus.h"
#include "disas/disas.h"
#include "qemu/host-utils.h"
#include "exec/exec-all.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "trace-tcg.h"
#include "exec/log.h"


#undef ALPHA_DEBUG_DISAS
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif

typedef struct DisasContext DisasContext;
struct DisasContext {
    struct TranslationBlock *tb;
    uint64_t pc;
#ifndef CONFIG_USER_ONLY
    uint64_t palbr;
#endif
    int mem_idx;

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;

    /* implver value for this CPU.  */
    int implver;

    /* The set of registers active in the current context.  */
    TCGv *ir;

    /* Temporaries for $31 and $f31 as source and destination.  */
    TCGv zero;
    TCGv sink;
    /* Temporary for immediate constants.  */
    TCGv lit;

    bool singlestep_enabled;
};

/* Return values from translate_one, indicating the state of the TB.
   Note that zero indicates that we are not exiting the TB.  */

typedef enum {
    NO_EXIT,

    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,

    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,
    EXIT_PC_UPDATED_NOCHAIN,

    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,

    /* We are exiting the TB due to page crossing or space constraints.  */
    EXIT_FALLTHRU,

    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;

/* global register indexes */
static TCGv_env cpu_env;
static TCGv cpu_std_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock_addr;
static TCGv cpu_lock_value;

#ifndef CONFIG_USER_ONLY
static TCGv cpu_pal_ir[31];
#endif

#include "exec/gen-icount.h"

void alpha_translate_init(void)
{
#define DEF_VAR(V)  { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    static bool done_init = false;
    int i;

    if (done_init) {
        return;
    }
    done_init = true;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(cpu_env,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
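    /* In PALmode the shadow registers replace r8-r14 (t7, s0-s5) and
       r25 (t11); the remaining entries continue to alias the standard
       register file via the memcpy above.  */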
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(cpu_env,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(cpu_env, v->ofs, v->name);
    }
}

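/* The $31/$f31 stand-ins are allocated lazily: a zero constant when read
   as a source, a discarded scratch temporary when named as a destination.
   Both are released again by free_context_temps.  */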
static TCGv load_zero(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->zero)) {
        ctx->zero = tcg_const_i64(0);
    }
    return ctx->zero;
}

static TCGv dest_sink(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->sink)) {
        ctx->sink = tcg_temp_new();
    }
    return ctx->sink;
}

static void free_context_temps(DisasContext *ctx)
{
    if (!TCGV_IS_UNUSED_I64(ctx->sink)) {
        tcg_gen_discard_i64(ctx->sink);
        tcg_temp_free(ctx->sink);
        TCGV_UNUSED_I64(ctx->sink);
    }
    if (!TCGV_IS_UNUSED_I64(ctx->zero)) {
        tcg_temp_free(ctx->zero);
        TCGV_UNUSED_I64(ctx->zero);
    }
    if (!TCGV_IS_UNUSED_I64(ctx->lit)) {
        tcg_temp_free(ctx->lit);
        TCGV_UNUSED_I64(ctx->lit);
    }
}

static TCGv load_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
                         uint8_t lit, bool islit)
{
    if (islit) {
        ctx->lit = tcg_const_i64(lit);
        return ctx->lit;
    } else if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static TCGv load_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static void gen_excp_1(int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(cpu_env, tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static ExitStatus gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_excp_1(exception, error_code);
    return EXIT_NORETURN;
}

static inline ExitStatus gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}

static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

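/* Load-locked: remember the address and the value loaded, so that the
   matching store-conditional can be implemented as a compare-and-swap
   against that remembered pair.  */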
static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

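/* Common path for the integer and FP loads: FP selects the other register
   file, and CLEAR implements the LDQ_U behaviour of ignoring the low three
   address bits.  */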
static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* LDQ_U with ra $31 is UNOP.  Various other loads are forms of
       prefetch, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

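/* STx_C: implemented as a compare-and-swap against the LDx_L state.
   If the address no longer matches cpu_lock_addr the store fails
   outright; otherwise cmpxchg stores the new value only if memory
   still holds cpu_lock_value.  RA receives 1 on success and 0 on
   failure, and the lock is invalidated either way.  */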
static ExitStatus gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                        int32_t disp16, int mem_idx,
                                        TCGMemOp op)
{
    TCGLabel *lab_fail, *lab_done;
    TCGv addr, val;

    addr = tcg_temp_new_i64();
    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);
    free_context_temps(ctx);

    lab_fail = gen_new_label();
    lab_done = gen_new_label();
    tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);
    tcg_temp_free_i64(addr);

    val = tcg_temp_new_i64();
    tcg_gen_atomic_cmpxchg_i64(val, cpu_lock_addr, cpu_lock_value,
                               load_gpr(ctx, ra), mem_idx, op);
    free_context_temps(ctx);

    if (ra != 31) {
        tcg_gen_setcond_i64(TCG_COND_EQ, ctx->ir[ra], val, cpu_lock_value);
    }
    tcg_temp_free_i64(val);
    tcg_gen_br(lab_done);

    gen_set_label(lab_fail);
    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], 0);
    }

    gen_set_label(lab_done);
    tcg_gen_movi_i64(cpu_lock_addr, -1);
    return NO_EXIT;
}

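/* The Alpha superpage (kseg) is recognised here as a kernel-mode address
   whose upper bits sign-extend from TARGET_VIRT_ADDR_SPACE_BITS and whose
   bits <42:41> equal 2.  It is always mapped and its page permissions
   cannot change, which is what permits goto_tb chaining into it below.  */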
static bool in_superpage(DisasContext *ctx, int64_t addr)
{
#ifndef CONFIG_USER_ONLY
    return ((ctx->tb->flags & TB_FLAGS_USER_MODE) == 0
            && addr >> TARGET_VIRT_ADDR_SPACE_BITS == -1
            && ((addr >> 41) & 3) == 2);
#else
    return false;
#endif
}

static bool use_exit_tb(DisasContext *ctx)
{
    return ((ctx->tb->cflags & CF_LAST_IO)
            || ctx->singlestep_enabled
            || singlestep);
}

static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Suppress goto_tb in the case of single-stepping and IO.  */
    if (unlikely(use_exit_tb(ctx))) {
        return false;
    }
#ifndef CONFIG_USER_ONLY
    /* If the destination is in the superpage, the page perms can't change.  */
    if (in_superpage(ctx, dest)) {
        return true;
    }
    /* Check for the dest on the same page as the start of the TB.  */
    return ((ctx->tb->pc ^ dest) & TARGET_PAGE_MASK) == 0;
#else
    return true;
#endif
}

static ExitStatus gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);

    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
    }

    /* Notice branch-to-next; used to initialize RA with the PC.  */
    if (disp == 0) {
        return NO_EXIT;
    } else if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);
        return EXIT_GOTO_TB;
    } else {
        tcg_gen_movi_i64(cpu_pc, dest);
        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                     TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);

        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + 1);

        return EXIT_GOTO_TB;
    } else {
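        /* When we cannot chain, select the new PC without branching:
           movcond picks DEST when CMP satisfies COND against zero, and
           the fall-through PC otherwise.  */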
        TCGv_i64 z = tcg_const_i64(0);
        TCGv_i64 d = tcg_const_i64(dest);
        TCGv_i64 p = tcg_const_i64(ctx->pc);

        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);

        tcg_temp_free_i64(z);
        tcg_temp_free_i64(d);
        tcg_temp_free_i64(p);
        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                            int32_t disp, int mask)
{
    TCGv cmp_tmp;

    if (mask) {
        cmp_tmp = tcg_temp_new();
        tcg_gen_andi_i64(cmp_tmp, load_gpr(ctx, ra), 1);
    } else {
        cmp_tmp = load_gpr(ctx, ra);
    }

    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

/* Fold -0.0 for comparison with COND.  */

static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}

static ExitStatus gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}

#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */

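/* The rounding mode and flush-to-zero state are cached in the DisasContext
   (tb_rm, tb_ftz), so that consecutive FP insns with the same qualifiers
   do not redundantly rewrite fp_status within one TB.  */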
static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "fpu/softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal as per spec.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}

static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}

static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_sari_i64(tmp, vb, 32);
    tcg_gen_shri_i64(vc, vb, 29);
    tcg_gen_andi_i64(tmp, tmp, (int32_t)0xc0000000);
    tcg_gen_andi_i64(vc, vc, 0x3fffffff);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(tmp);
}

static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)

static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    /* No need to set flushzero, since we have an integer output.  */
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}

static void gen_ieee_intcvt(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb, vc;

    gen_qual_roundmode(ctx, fn11);
    vb = load_fpr(ctx, rb);
    vc = dest_fpr(ctx, rc);

    /* The only exception that can be raised by integer conversion
       is inexact.  Thus we only need to worry about exceptions when
       inexact handling is requested.  */
    helper(vc, cpu_env, vb);
    if (fn11 & QUAL_I) {
        gen_fp_exc_raise(rc, fn11);
    }
}

#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)

static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}

static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)

static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)

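/* Expand the 8-bit ZAPNOT literal into a 64-bit byte mask: bit I of LIT
   set means byte I of the operand is kept.  */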
static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int i;

    for (i = 0; i < 8; ++i) {
        if ((lit >> i) & 1) {
            mask |= 0xffull << (i * 8);
        }
    }
    return mask;
}

/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}

/* EXTWH, EXTLH, EXTQH */
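/* The "high" extracts shift VA left by 64 - 8*(RB & 7) bits, taken mod 64,
   and then apply the byte mask.  In the literal case this folds to a
   single deposit into a zeroed destination.  */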
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (64 - lit * 8) & 0x3f;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_deposit_z_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len >= 64) {
            len = 64 - pos;
        }
        tcg_gen_extract_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
        gen_zapnoti(vc, vc, byte_mask);
    }
}

/* INSWH, INSLH, INSQH */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = 64 - (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos < len) {
            tcg_gen_extract_i64(vc, va, pos, len - pos);
        } else {
            tcg_gen_movi_i64(vc, 0);
        }
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.  Do this
           portably by splitting the shift into two parts: shift_count-1 and 1.
           Arrange for the -1 by using ones-complement instead of
           twos-complement in the negation: ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}

/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        int pos = (lit & 7) * 8;
        int len = cto32(byte_mask) * 8;
        if (pos + len > 64) {
            len = 64 - pos;
        }
        tcg_gen_deposit_z_i64(vc, va, pos, len);
    } else {
        TCGv tmp = tcg_temp_new();
        TCGv shift = tcg_temp_new();

        /* The instruction description has us left-shift the byte mask
           and extract bits <15:8> and apply that zap at the end.  This
           is equivalent to simply performing the zap first and shifting
           afterward.  */
        gen_zapnoti(tmp, va, byte_mask);

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
        tcg_temp_free(tmp);
    }
}

/* MSKWH, MSKLH, MSKQH */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */

        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

/* MSKBL, MSKWL, MSKLL, MSKQL */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

static void gen_rx(DisasContext *ctx, int ra, int set)
{
    TCGv_i32 tmp;

    if (ra != 31) {
        tcg_gen_ld8u_i64(ctx->ir[ra], cpu_env,
                         offsetof(CPUAlphaState, intr_flag));
    }

    tmp = tcg_const_i32(set);
    tcg_gen_st8_i32(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
    tcg_temp_free_i32(tmp);
}

static ExitStatus gen_call_pal(DisasContext *ctx, int palcode)
{
    /* We're emulating OSF/1 PALcode.  Many of these are trivial access
       to internal cpu registers.  */

    /* Unprivileged PAL call */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return NO_EXIT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code */
    if (palcode < 0x40 && (ctx->tb->flags & TB_FLAGS_USER_MODE) == 0) {
        TCGv tmp;
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35:
            /* SWPIPL */
            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));

            /* But make sure to store only the 3 IPL bits from the user.  */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, ps));
            tcg_temp_free(tmp);

            /* Allow interrupts to be recognized right away.  */
            tcg_gen_movi_i64(cpu_pc, ctx->pc);
            return EXIT_PC_UPDATED_NOCHAIN;

        case 0x36:
            /* RDPS */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));
            break;
        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        case 0x3E:
            /* WTINT */
            tmp = tcg_const_i64(1);
            tcg_gen_st32_i64(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                           offsetof(CPUState, halted));
            tcg_gen_movi_i64(ctx->ir[IR_V0], 0);
            return gen_excp(ctx, EXCP_HALTED, 0);

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return NO_EXIT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->pc;
        uint64_t entry = ctx->palbr;

        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);

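        /* CALL_PAL entry points are laid out relative to PALBR:
           privileged vectors at +0x1000, unprivileged vectors at
           +0x2000, with 64 bytes of space per vector.  */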
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination is running in PALmode, we don't really
           need the page permissions check.  We'll see the existence of
           the page when we create the TB, and we'll flush all TBs if
           we change the PAL base register.  */
        if (!use_exit_tb(ctx)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb((uintptr_t)ctx->tb);
            return EXIT_GOTO_TB;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return EXIT_PC_UPDATED;
        }
    }
#endif
}

#ifndef CONFIG_USER_ONLY

#define PR_BYTE         0x100000
#define PR_LONG         0x200000

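/* Map a processor-register number to its CPUAlphaState offset.  The
   offsets all lie well below bit 20, so the access width can be or'd
   into the return value and masked back off by the callers.  */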
static int cpu_pr_data(int pr)
{
    switch (pr) {
    case  0: return offsetof(CPUAlphaState, ps) | PR_BYTE;
    case  1: return offsetof(CPUAlphaState, fen) | PR_BYTE;
    case  2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
    case  3: return offsetof(CPUAlphaState, trap_arg0);
    case  4: return offsetof(CPUAlphaState, trap_arg1);
    case  5: return offsetof(CPUAlphaState, trap_arg2);
    case  6: return offsetof(CPUAlphaState, exc_addr);
    case  7: return offsetof(CPUAlphaState, palbr);
    case  8: return offsetof(CPUAlphaState, ptbr);
    case  9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 40 ... 63:
        return offsetof(CPUAlphaState, scratch[pr - 40]);

    case 251:
        return offsetof(CPUAlphaState, alarm_expire);
    }
    return 0;
}

static ExitStatus gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (use_icount) {
            gen_io_start();
            helper(va);
            gen_io_end();
            return EXIT_PC_STALE;
        } else {
            helper(va);
        }
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_BYTE) {
            tcg_gen_ld8u_i64(va, cpu_env, data & ~PR_BYTE);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return NO_EXIT;
}

static ExitStatus gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    TCGv tmp;
    int data;

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT */
        tmp = tcg_const_i64(1);
        tcg_gen_st32_i64(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                       offsetof(CPUState, halted));
        return gen_excp(ctx, EXCP_HALTED, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return EXIT_PC_STALE;

    case 251:
        /* ALARM */
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the TBs
           that ended with a CALL_PAL.  Since the base register usually only
           changes during boot, flushing everything works well.  */
        gen_helper_tb_flush(cpu_env);
        return EXIT_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_BYTE) {
                tcg_gen_st8_i64(vb, cpu_env, data & ~PR_BYTE);
            } else if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return NO_EXIT;
}
#endif /* !USER_ONLY */

#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tb->flags & (FLAG)) == 0) {   \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    ExitStatus ret;

    /* Decode all instruction fields */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);

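    /* $31 always reads as zero, so fold a register source of $31 into a
       zero literal; the special cases below then need not distinguish
       the register and immediate forms.  */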
1485     if (rb == 31 && !islit) {
1486         islit = true;
1487         lit = 0;
1488     }
1489 
1490     ret = NO_EXIT;
1491     switch (opc) {
1492     case 0x00:
1493         /* CALL_PAL */
1494         ret = gen_call_pal(ctx, insn & 0x03ffffff);
1495         break;
1496     case 0x01:
1497         /* OPC01 */
1498         goto invalid_opc;
1499     case 0x02:
1500         /* OPC02 */
1501         goto invalid_opc;
1502     case 0x03:
1503         /* OPC03 */
1504         goto invalid_opc;
1505     case 0x04:
1506         /* OPC04 */
1507         goto invalid_opc;
1508     case 0x05:
1509         /* OPC05 */
1510         goto invalid_opc;
1511     case 0x06:
1512         /* OPC06 */
1513         goto invalid_opc;
1514     case 0x07:
1515         /* OPC07 */
1516         goto invalid_opc;
1517 
1518     case 0x09:
1519         /* LDAH */
1520         disp16 = (uint32_t)disp16 << 16;
1521         /* fall through */
1522     case 0x08:
1523         /* LDA */
1524         va = dest_gpr(ctx, ra);
1525         /* It's worth special-casing immediate loads.  */
1526         if (rb == 31) {
1527             tcg_gen_movi_i64(va, disp16);
1528         } else {
1529             tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
1530         }
1531         break;
1532 
1533     case 0x0A:
1534         /* LDBU */
1535         REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1536         gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
1537         break;
1538     case 0x0B:
1539         /* LDQ_U */
1540         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
1541         break;
1542     case 0x0C:
1543         /* LDWU */
1544         REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1545         gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
1546         break;
1547     case 0x0D:
1548         /* STW */
1549         REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1550         gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
1551         break;
1552     case 0x0E:
1553         /* STB */
1554         REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
1555         gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
1556         break;
1557     case 0x0F:
1558         /* STQ_U */
1559         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
1560         break;
1561 
1562     case 0x10:
1563         vc = dest_gpr(ctx, rc);
1564         vb = load_gpr_lit(ctx, rb, lit, islit);
1565 
1566         if (ra == 31) {
1567             if (fn7 == 0x00) {
1568                 /* Special case ADDL as SEXTL.  */
1569                 tcg_gen_ext32s_i64(vc, vb);
1570                 break;
1571             }
1572             if (fn7 == 0x29) {
1573                 /* Special case SUBQ as NEGQ.  */
1574                 tcg_gen_neg_i64(vc, vb);
1575                 break;
1576             }
1577         }
1578 
1579         va = load_gpr(ctx, ra);
1580         switch (fn7) {
1581         case 0x00:
1582             /* ADDL */
1583             tcg_gen_add_i64(vc, va, vb);
1584             tcg_gen_ext32s_i64(vc, vc);
1585             break;
1586         case 0x02:
1587             /* S4ADDL */
1588             tmp = tcg_temp_new();
1589             tcg_gen_shli_i64(tmp, va, 2);
1590             tcg_gen_add_i64(tmp, tmp, vb);
1591             tcg_gen_ext32s_i64(vc, tmp);
1592             tcg_temp_free(tmp);
1593             break;
1594         case 0x09:
1595             /* SUBL */
1596             tcg_gen_sub_i64(vc, va, vb);
1597             tcg_gen_ext32s_i64(vc, vc);
1598             break;
1599         case 0x0B:
1600             /* S4SUBL */
1601             tmp = tcg_temp_new();
1602             tcg_gen_shli_i64(tmp, va, 2);
1603             tcg_gen_sub_i64(tmp, tmp, vb);
1604             tcg_gen_ext32s_i64(vc, tmp);
1605             tcg_temp_free(tmp);
1606             break;
1607         case 0x0F:
1608             /* CMPBGE */
1609             if (ra == 31) {
1610                 /* Special case 0 >= X as X == 0.  */
1611                 gen_helper_cmpbe0(vc, vb);
1612             } else {
1613                 gen_helper_cmpbge(vc, va, vb);
1614             }
1615             break;
1616         case 0x12:
1617             /* S8ADDL */
1618             tmp = tcg_temp_new();
1619             tcg_gen_shli_i64(tmp, va, 3);
1620             tcg_gen_add_i64(tmp, tmp, vb);
1621             tcg_gen_ext32s_i64(vc, tmp);
1622             tcg_temp_free(tmp);
1623             break;
1624         case 0x1B:
1625             /* S8SUBL */
1626             tmp = tcg_temp_new();
1627             tcg_gen_shli_i64(tmp, va, 3);
1628             tcg_gen_sub_i64(tmp, tmp, vb);
1629             tcg_gen_ext32s_i64(vc, tmp);
1630             tcg_temp_free(tmp);
1631             break;
1632         case 0x1D:
1633             /* CMPULT */
1634             tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
1635             break;
1636         case 0x20:
1637             /* ADDQ */
1638             tcg_gen_add_i64(vc, va, vb);
1639             break;
1640         case 0x22:
1641             /* S4ADDQ */
1642             tmp = tcg_temp_new();
1643             tcg_gen_shli_i64(tmp, va, 2);
1644             tcg_gen_add_i64(vc, tmp, vb);
1645             tcg_temp_free(tmp);
1646             break;
1647         case 0x29:
1648             /* SUBQ */
1649             tcg_gen_sub_i64(vc, va, vb);
1650             break;
1651         case 0x2B:
1652             /* S4SUBQ */
1653             tmp = tcg_temp_new();
1654             tcg_gen_shli_i64(tmp, va, 2);
1655             tcg_gen_sub_i64(vc, tmp, vb);
1656             tcg_temp_free(tmp);
1657             break;
1658         case 0x2D:
1659             /* CMPEQ */
1660             tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
1661             break;
1662         case 0x32:
1663             /* S8ADDQ */
1664             tmp = tcg_temp_new();
1665             tcg_gen_shli_i64(tmp, va, 3);
1666             tcg_gen_add_i64(vc, tmp, vb);
1667             tcg_temp_free(tmp);
1668             break;
1669         case 0x3B:
1670             /* S8SUBQ */
1671             tmp = tcg_temp_new();
1672             tcg_gen_shli_i64(tmp, va, 3);
1673             tcg_gen_sub_i64(vc, tmp, vb);
1674             tcg_temp_free(tmp);
1675             break;
1676         case 0x3D:
1677             /* CMPULE */
1678             tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
1679             break;
1680         case 0x40:
1681             /* ADDL/V */
1682             tmp = tcg_temp_new();
1683             tcg_gen_ext32s_i64(tmp, va);
1684             tcg_gen_ext32s_i64(vc, vb);
1685             tcg_gen_add_i64(tmp, tmp, vc);
1686             tcg_gen_ext32s_i64(vc, tmp);
1687             gen_helper_check_overflow(cpu_env, vc, tmp);
1688             tcg_temp_free(tmp);
1689             break;
1690         case 0x49:
1691             /* SUBL/V */
1692             tmp = tcg_temp_new();
1693             tcg_gen_ext32s_i64(tmp, va);
1694             tcg_gen_ext32s_i64(vc, vb);
1695             tcg_gen_sub_i64(tmp, tmp, vc);
1696             tcg_gen_ext32s_i64(vc, tmp);
1697             gen_helper_check_overflow(cpu_env, vc, tmp);
1698             tcg_temp_free(tmp);
1699             break;
1700         case 0x4D:
1701             /* CMPLT */
1702             tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
1703             break;
1704         case 0x60:
1705             /* ADDQ/V */
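            /* 64-bit signed overflow test: the operands must agree in
               sign (bit 63 of ~(va ^ vb) set, via eqv) while the result
               disagrees with va (bit 63 of (va ^ vc) set).  The AND of
               the two conditions, shifted down to bit 0, is handed to
               check_overflow for comparison against zero.  */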
1706             tmp = tcg_temp_new();
1707             tmp2 = tcg_temp_new();
1708             tcg_gen_eqv_i64(tmp, va, vb);
1709             tcg_gen_mov_i64(tmp2, va);
1710             tcg_gen_add_i64(vc, va, vb);
1711             tcg_gen_xor_i64(tmp2, tmp2, vc);
1712             tcg_gen_and_i64(tmp, tmp, tmp2);
1713             tcg_gen_shri_i64(tmp, tmp, 63);
1714             tcg_gen_movi_i64(tmp2, 0);
1715             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1716             tcg_temp_free(tmp);
1717             tcg_temp_free(tmp2);
1718             break;
1719         case 0x69:
1720             /* SUBQ/V */
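            /* For subtraction the sign test flips: overflow requires
               operands of differing sign (va ^ vb) and a result whose
               sign differs from va's (va ^ vc).  Otherwise identical
               to the ADDQ/V sequence above.  */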
1721             tmp = tcg_temp_new();
1722             tmp2 = tcg_temp_new();
1723             tcg_gen_xor_i64(tmp, va, vb);
1724             tcg_gen_mov_i64(tmp2, va);
1725             tcg_gen_sub_i64(vc, va, vb);
1726             tcg_gen_xor_i64(tmp2, tmp2, vc);
1727             tcg_gen_and_i64(tmp, tmp, tmp2);
1728             tcg_gen_shri_i64(tmp, tmp, 63);
1729             tcg_gen_movi_i64(tmp2, 0);
1730             gen_helper_check_overflow(cpu_env, tmp, tmp2);
1731             tcg_temp_free(tmp);
1732             tcg_temp_free(tmp2);
1733             break;
1734         case 0x6D:
1735             /* CMPLE */
1736             tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
1737             break;
1738         default:
1739             goto invalid_opc;
1740         }
1741         break;
1742 
1743     case 0x11:
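        /* fn7 0x20 is BIS (logical OR).  The canonical encodings of
           nop (bis $31,$31,$31) and mov (bis $31,rb,rc) come through
           here, so peephole them before the general decode below.  */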
1744         if (fn7 == 0x20) {
1745             if (rc == 31) {
1746                 /* Special case BIS as NOP.  */
1747                 break;
1748             }
1749             if (ra == 31) {
1750                 /* Special case BIS as MOV.  */
1751                 vc = dest_gpr(ctx, rc);
1752                 if (islit) {
1753                     tcg_gen_movi_i64(vc, lit);
1754                 } else {
1755                     tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
1756                 }
1757                 break;
1758             }
1759         }
1760 
1761         vc = dest_gpr(ctx, rc);
1762         vb = load_gpr_lit(ctx, rb, lit, islit);
1763 
1764         if (fn7 == 0x28 && ra == 31) {
1765             /* Special case ORNOT as NOT.  */
1766             tcg_gen_not_i64(vc, vb);
1767             break;
1768         }
1769 
1770         va = load_gpr(ctx, ra);
1771         switch (fn7) {
1772         case 0x00:
1773             /* AND */
1774             tcg_gen_and_i64(vc, va, vb);
1775             break;
1776         case 0x08:
1777             /* BIC */
1778             tcg_gen_andc_i64(vc, va, vb);
1779             break;
1780         case 0x14:
1781             /* CMOVLBS */
1782             tmp = tcg_temp_new();
1783             tcg_gen_andi_i64(tmp, va, 1);
1784             tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
1785                                 vb, load_gpr(ctx, rc));
1786             tcg_temp_free(tmp);
1787             break;
1788         case 0x16:
1789             /* CMOVLBC */
1790             tmp = tcg_temp_new();
1791             tcg_gen_andi_i64(tmp, va, 1);
1792             tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
1793                                 vb, load_gpr(ctx, rc));
1794             tcg_temp_free(tmp);
1795             break;
1796         case 0x20:
1797             /* BIS */
1798             tcg_gen_or_i64(vc, va, vb);
1799             break;
1800         case 0x24:
1801             /* CMOVEQ */
1802             tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
1803                                 vb, load_gpr(ctx, rc));
1804             break;
1805         case 0x26:
1806             /* CMOVNE */
1807             tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
1808                                 vb, load_gpr(ctx, rc));
1809             break;
1810         case 0x28:
1811             /* ORNOT */
1812             tcg_gen_orc_i64(vc, va, vb);
1813             break;
1814         case 0x40:
1815             /* XOR */
1816             tcg_gen_xor_i64(vc, va, vb);
1817             break;
1818         case 0x44:
1819             /* CMOVLT */
1820             tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
1821                                 vb, load_gpr(ctx, rc));
1822             break;
1823         case 0x46:
1824             /* CMOVGE */
1825             tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
1826                                 vb, load_gpr(ctx, rc));
1827             break;
1828         case 0x48:
1829             /* EQV */
1830             tcg_gen_eqv_i64(vc, va, vb);
1831             break;
1832         case 0x61:
1833             /* AMASK */
1834             REQUIRE_REG_31(ra);
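            /* The guest passes a mask of CPU feature bits in vb and
               gets back those it requested that are *not* implemented;
               a zero result means full support.  The implemented
               feature mask travels in the TB flags.  */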
1835             {
1836                 uint64_t amask = ctx->tb->flags >> TB_FLAGS_AMASK_SHIFT;
1837                 tcg_gen_andi_i64(vc, vb, ~amask);
1838             }
1839             break;
1840         case 0x64:
1841             /* CMOVLE */
1842             tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
1843                                 vb, load_gpr(ctx, rc));
1844             break;
1845         case 0x66:
1846             /* CMOVGT */
1847             tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
1848                                 vb, load_gpr(ctx, rc));
1849             break;
1850         case 0x6C:
1851             /* IMPLVER */
1852             REQUIRE_REG_31(ra);
1853             tcg_gen_movi_i64(vc, ctx->implver);
1854             break;
1855         default:
1856             goto invalid_opc;
1857         }
1858         break;
1859 
1860     case 0x12:
1861         vc = dest_gpr(ctx, rc);
1862         va = load_gpr(ctx, ra);
1863         switch (fn7) {
1864         case 0x02:
1865             /* MSKBL */
1866             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
1867             break;
1868         case 0x06:
1869             /* EXTBL */
1870             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
1871             break;
1872         case 0x0B:
1873             /* INSBL */
1874             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
1875             break;
1876         case 0x12:
1877             /* MSKWL */
1878             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
1879             break;
1880         case 0x16:
1881             /* EXTWL */
1882             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
1883             break;
1884         case 0x1B:
1885             /* INSWL */
1886             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
1887             break;
1888         case 0x22:
1889             /* MSKLL */
1890             gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
1891             break;
1892         case 0x26:
1893             /* EXTLL */
1894             gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
1895             break;
1896         case 0x2B:
1897             /* INSLL */
1898             gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
1899             break;
1900         case 0x30:
1901             /* ZAP */
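            /* ZAP clears byte i of va when bit i of the mask is set;
               ZAPNOT (below) clears it when the bit is clear.  With a
               literal mask both fold to the same compile-time byte
               mask, ZAP by inverting the literal first.  */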
1902             if (islit) {
1903                 gen_zapnoti(vc, va, ~lit);
1904             } else {
1905                 gen_helper_zap(vc, va, load_gpr(ctx, rb));
1906             }
1907             break;
1908         case 0x31:
1909             /* ZAPNOT */
1910             if (islit) {
1911                 gen_zapnoti(vc, va, lit);
1912             } else {
1913                 gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
1914             }
1915             break;
1916         case 0x32:
1917             /* MSKQL */
1918             gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
1919             break;
1920         case 0x34:
1921             /* SRL */
1922             if (islit) {
1923                 tcg_gen_shri_i64(vc, va, lit & 0x3f);
1924             } else {
1925                 tmp = tcg_temp_new();
1926                 vb = load_gpr(ctx, rb);
1927                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1928                 tcg_gen_shr_i64(vc, va, tmp);
1929                 tcg_temp_free(tmp);
1930             }
1931             break;
1932         case 0x36:
1933             /* EXTQL */
1934             gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
1935             break;
1936         case 0x39:
1937             /* SLL */
1938             if (islit) {
1939                 tcg_gen_shli_i64(vc, va, lit & 0x3f);
1940             } else {
1941                 tmp = tcg_temp_new();
1942                 vb = load_gpr(ctx, rb);
1943                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1944                 tcg_gen_shl_i64(vc, va, tmp);
1945                 tcg_temp_free(tmp);
1946             }
1947             break;
1948         case 0x3B:
1949             /* INSQL */
1950             gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
1951             break;
1952         case 0x3C:
1953             /* SRA */
1954             if (islit) {
1955                 tcg_gen_sari_i64(vc, va, lit & 0x3f);
1956             } else {
1957                 tmp = tcg_temp_new();
1958                 vb = load_gpr(ctx, rb);
1959                 tcg_gen_andi_i64(tmp, vb, 0x3f);
1960                 tcg_gen_sar_i64(vc, va, tmp);
1961                 tcg_temp_free(tmp);
1962             }
1963             break;
1964         case 0x52:
1965             /* MSKWH */
1966             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
1967             break;
1968         case 0x57:
1969             /* INSWH */
1970             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
1971             break;
1972         case 0x5A:
1973             /* EXTWH */
1974             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
1975             break;
1976         case 0x62:
1977             /* MSKLH */
1978             gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
1979             break;
1980         case 0x67:
1981             /* INSLH */
1982             gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
1983             break;
1984         case 0x6A:
1985             /* EXTLH */
1986             gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
1987             break;
1988         case 0x72:
1989             /* MSKQH */
1990             gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
1991             break;
1992         case 0x77:
1993             /* INSQH */
1994             gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
1995             break;
1996         case 0x7A:
1997             /* EXTQH */
1998             gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
1999             break;
2000         default:
2001             goto invalid_opc;
2002         }
2003         break;
2004 
2005     case 0x13:
2006         vc = dest_gpr(ctx, rc);
2007         vb = load_gpr_lit(ctx, rb, lit, islit);
2008         va = load_gpr(ctx, ra);
2009         switch (fn7) {
2010         case 0x00:
2011             /* MULL */
2012             tcg_gen_mul_i64(vc, va, vb);
2013             tcg_gen_ext32s_i64(vc, vc);
2014             break;
2015         case 0x20:
2016             /* MULQ */
2017             tcg_gen_mul_i64(vc, va, vb);
2018             break;
2019         case 0x30:
2020             /* UMULH */
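            /* mulu2 yields the full 128-bit unsigned product; UMULH
               wants only the high half, so the low half lands in a
               scratch temp and is discarded.  */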
2021             tmp = tcg_temp_new();
2022             tcg_gen_mulu2_i64(tmp, vc, va, vb);
2023             tcg_temp_free(tmp);
2024             break;
2025         case 0x40:
2026             /* MULL/V */
2027             tmp = tcg_temp_new();
2028             tcg_gen_ext32s_i64(tmp, va);
2029             tcg_gen_ext32s_i64(vc, vb);
2030             tcg_gen_mul_i64(tmp, tmp, vc);
2031             tcg_gen_ext32s_i64(vc, tmp);
2032             gen_helper_check_overflow(cpu_env, vc, tmp);
2033             tcg_temp_free(tmp);
2034             break;
2035         case 0x60:
2036             /* MULQ/V */
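            /* muls2 gives the 128-bit signed product: low half in vc,
               high half in tmp.  The multiply did not overflow iff the
               high half equals the sign-extension of the low half,
               i.e. tmp == vc >> 63 (arithmetic).  */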
2037             tmp = tcg_temp_new();
2038             tmp2 = tcg_temp_new();
2039             tcg_gen_muls2_i64(vc, tmp, va, vb);
2040             tcg_gen_sari_i64(tmp2, vc, 63);
2041             gen_helper_check_overflow(cpu_env, tmp, tmp2);
2042             tcg_temp_free(tmp);
2043             tcg_temp_free(tmp2);
2044             break;
2045         default:
2046             goto invalid_opc;
2047         }
2048         break;
2049 
2050     case 0x14:
2051         REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2052         vc = dest_fpr(ctx, rc);
2053         switch (fpfn) { /* fn11 & 0x3F */
2054         case 0x04:
2055             /* ITOFS */
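            /* Move a 32-bit pattern from an integer register into FP
               register S-format: a pure bit rearrangement with no
               rounding or conversion.  */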
2056             REQUIRE_REG_31(rb);
2057             t32 = tcg_temp_new_i32();
2058             va = load_gpr(ctx, ra);
2059             tcg_gen_extrl_i64_i32(t32, va);
2060             gen_helper_memory_to_s(vc, t32);
2061             tcg_temp_free_i32(t32);
2062             break;
2063         case 0x0A:
2064             /* SQRTF */
2065             REQUIRE_REG_31(ra);
2066             vb = load_fpr(ctx, rb);
2067             gen_helper_sqrtf(vc, cpu_env, vb);
2068             break;
2069         case 0x0B:
2070             /* SQRTS */
2071             REQUIRE_REG_31(ra);
2072             gen_sqrts(ctx, rb, rc, fn11);
2073             break;
2074         case 0x14:
2075             /* ITOFF */
2076             REQUIRE_REG_31(rb);
2077             t32 = tcg_temp_new_i32();
2078             va = load_gpr(ctx, ra);
2079             tcg_gen_extrl_i64_i32(t32, va);
2080             gen_helper_memory_to_f(vc, t32);
2081             tcg_temp_free_i32(t32);
2082             break;
2083         case 0x24:
2084             /* ITOFT */
2085             REQUIRE_REG_31(rb);
2086             va = load_gpr(ctx, ra);
2087             tcg_gen_mov_i64(vc, va);
2088             break;
2089         case 0x2A:
2090             /* SQRTG */
2091             REQUIRE_REG_31(ra);
2092             vb = load_fpr(ctx, rb);
2093             gen_helper_sqrtg(vc, cpu_env, vb);
2094             break;
2095         case 0x2B:
2096             /* SQRTT */
2097             REQUIRE_REG_31(ra);
2098             gen_sqrtt(ctx, rb, rc, fn11);
2099             break;
2100         default:
2101             goto invalid_opc;
2102         }
2103         break;
2104 
2105     case 0x15:
2106         /* VAX floating point */
2107         /* XXX: rounding mode and trap qualifiers are ignored (!) */
2108         vc = dest_fpr(ctx, rc);
2109         vb = load_fpr(ctx, rb);
2110         va = load_fpr(ctx, ra);
2111         switch (fpfn) { /* fn11 & 0x3F */
2112         case 0x00:
2113             /* ADDF */
2114             gen_helper_addf(vc, cpu_env, va, vb);
2115             break;
2116         case 0x01:
2117             /* SUBF */
2118             gen_helper_subf(vc, cpu_env, va, vb);
2119             break;
2120         case 0x02:
2121             /* MULF */
2122             gen_helper_mulf(vc, cpu_env, va, vb);
2123             break;
2124         case 0x03:
2125             /* DIVF */
2126             gen_helper_divf(vc, cpu_env, va, vb);
2127             break;
2128         case 0x1E:
2129             /* CVTDG -- TODO */
2130             REQUIRE_REG_31(ra);
2131             goto invalid_opc;
2132         case 0x20:
2133             /* ADDG */
2134             gen_helper_addg(vc, cpu_env, va, vb);
2135             break;
2136         case 0x21:
2137             /* SUBG */
2138             gen_helper_subg(vc, cpu_env, va, vb);
2139             break;
2140         case 0x22:
2141             /* MULG */
2142             gen_helper_mulg(vc, cpu_env, va, vb);
2143             break;
2144         case 0x23:
2145             /* DIVG */
2146             gen_helper_divg(vc, cpu_env, va, vb);
2147             break;
2148         case 0x25:
2149             /* CMPGEQ */
2150             gen_helper_cmpgeq(vc, cpu_env, va, vb);
2151             break;
2152         case 0x26:
2153             /* CMPGLT */
2154             gen_helper_cmpglt(vc, cpu_env, va, vb);
2155             break;
2156         case 0x27:
2157             /* CMPGLE */
2158             gen_helper_cmpgle(vc, cpu_env, va, vb);
2159             break;
2160         case 0x2C:
2161             /* CVTGF */
2162             REQUIRE_REG_31(ra);
2163             gen_helper_cvtgf(vc, cpu_env, vb);
2164             break;
2165         case 0x2D:
2166             /* CVTGD -- TODO */
2167             REQUIRE_REG_31(ra);
2168             goto invalid_opc;
2169         case 0x2F:
2170             /* CVTGQ */
2171             REQUIRE_REG_31(ra);
2172             gen_helper_cvtgq(vc, cpu_env, vb);
2173             break;
2174         case 0x3C:
2175             /* CVTQF */
2176             REQUIRE_REG_31(ra);
2177             gen_helper_cvtqf(vc, cpu_env, vb);
2178             break;
2179         case 0x3E:
2180             /* CVTQG */
2181             REQUIRE_REG_31(ra);
2182             gen_helper_cvtqg(vc, cpu_env, vb);
2183             break;
2184         default:
2185             goto invalid_opc;
2186         }
2187         break;
2188 
2189     case 0x16:
2190         /* IEEE floating-point */
2191         switch (fpfn) { /* fn11 & 0x3F */
2192         case 0x00:
2193             /* ADDS */
2194             gen_adds(ctx, ra, rb, rc, fn11);
2195             break;
2196         case 0x01:
2197             /* SUBS */
2198             gen_subs(ctx, ra, rb, rc, fn11);
2199             break;
2200         case 0x02:
2201             /* MULS */
2202             gen_muls(ctx, ra, rb, rc, fn11);
2203             break;
2204         case 0x03:
2205             /* DIVS */
2206             gen_divs(ctx, ra, rb, rc, fn11);
2207             break;
2208         case 0x20:
2209             /* ADDT */
2210             gen_addt(ctx, ra, rb, rc, fn11);
2211             break;
2212         case 0x21:
2213             /* SUBT */
2214             gen_subt(ctx, ra, rb, rc, fn11);
2215             break;
2216         case 0x22:
2217             /* MULT */
2218             gen_mult(ctx, ra, rb, rc, fn11);
2219             break;
2220         case 0x23:
2221             /* DIVT */
2222             gen_divt(ctx, ra, rb, rc, fn11);
2223             break;
2224         case 0x24:
2225             /* CMPTUN */
2226             gen_cmptun(ctx, ra, rb, rc, fn11);
2227             break;
2228         case 0x25:
2229             /* CMPTEQ */
2230             gen_cmpteq(ctx, ra, rb, rc, fn11);
2231             break;
2232         case 0x26:
2233             /* CMPTLT */
2234             gen_cmptlt(ctx, ra, rb, rc, fn11);
2235             break;
2236         case 0x27:
2237             /* CMPTLE */
2238             gen_cmptle(ctx, ra, rb, rc, fn11);
2239             break;
2240         case 0x2C:
2241             REQUIRE_REG_31(ra);
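            /* fn11 disambiguates the two conversions sharing fpfn
               0x2C: encodings 0x2AC and 0x6AC are CVTST (S to T);
               everything else is CVTTS (T to S).  */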
2242             if (fn11 == 0x2AC || fn11 == 0x6AC) {
2243                 /* CVTST */
2244                 gen_cvtst(ctx, rb, rc, fn11);
2245             } else {
2246                 /* CVTTS */
2247                 gen_cvtts(ctx, rb, rc, fn11);
2248             }
2249             break;
2250         case 0x2F:
2251             /* CVTTQ */
2252             REQUIRE_REG_31(ra);
2253             gen_cvttq(ctx, rb, rc, fn11);
2254             break;
2255         case 0x3C:
2256             /* CVTQS */
2257             REQUIRE_REG_31(ra);
2258             gen_cvtqs(ctx, rb, rc, fn11);
2259             break;
2260         case 0x3E:
2261             /* CVTQT */
2262             REQUIRE_REG_31(ra);
2263             gen_cvtqt(ctx, rb, rc, fn11);
2264             break;
2265         default:
2266             goto invalid_opc;
2267         }
2268         break;
2269 
2270     case 0x17:
2271         switch (fn11) {
2272         case 0x010:
2273             /* CVTLQ */
2274             REQUIRE_REG_31(ra);
2275             vc = dest_fpr(ctx, rc);
2276             vb = load_fpr(ctx, rb);
2277             gen_cvtlq(vc, vb);
2278             break;
2279         case 0x020:
2280             /* CPYS */
2281             if (rc == 31) {
2282                 /* Special case CPYS as FNOP.  */
2283             } else {
2284                 vc = dest_fpr(ctx, rc);
2285                 va = load_fpr(ctx, ra);
2286                 if (ra == rb) {
2287                     /* Special case CPYS as FMOV.  */
2288                     tcg_gen_mov_i64(vc, va);
2289                 } else {
2290                     vb = load_fpr(ctx, rb);
2291                     gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
2292                 }
2293             }
2294             break;
2295         case 0x021:
2296             /* CPYSN */
2297             vc = dest_fpr(ctx, rc);
2298             vb = load_fpr(ctx, rb);
2299             va = load_fpr(ctx, ra);
2300             gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
2301             break;
2302         case 0x022:
2303             /* CPYSE */
2304             vc = dest_fpr(ctx, rc);
2305             vb = load_fpr(ctx, rb);
2306             va = load_fpr(ctx, ra);
2307             gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
2308             break;
2309         case 0x024:
2310             /* MT_FPCR */
2311             va = load_fpr(ctx, ra);
2312             gen_helper_store_fpcr(cpu_env, va);
2313             if (ctx->tb_rm == QUAL_RM_D) {
2314                 /* Re-do the copy of the rounding mode to fp_status
2315                    the next time we use dynamic rounding.  */
2316                 ctx->tb_rm = -1;
2317             }
2318             break;
2319         case 0x025:
2320             /* MF_FPCR */
2321             va = dest_fpr(ctx, ra);
2322             gen_helper_load_fpcr(va, cpu_env);
2323             break;
2324         case 0x02A:
2325             /* FCMOVEQ */
2326             gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
2327             break;
2328         case 0x02B:
2329             /* FCMOVNE */
2330             gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
2331             break;
2332         case 0x02C:
2333             /* FCMOVLT */
2334             gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
2335             break;
2336         case 0x02D:
2337             /* FCMOVGE */
2338             gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
2339             break;
2340         case 0x02E:
2341             /* FCMOVLE */
2342             gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
2343             break;
2344         case 0x02F:
2345             /* FCMOVGT */
2346             gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
2347             break;
2348         case 0x030: /* CVTQL */
2349         case 0x130: /* CVTQL/V */
2350         case 0x530: /* CVTQL/SV */
2351             REQUIRE_REG_31(ra);
2352             vc = dest_fpr(ctx, rc);
2353             vb = load_fpr(ctx, rb);
2354             gen_helper_cvtql(vc, cpu_env, vb);
2355             gen_fp_exc_raise(rc, fn11);
2356             break;
2357         default:
2358             goto invalid_opc;
2359         }
2360         break;
2361 
2362     case 0x18:
2363         switch ((uint16_t)disp16) {
2364         case 0x0000:
2365             /* TRAPB */
2366             /* No-op.  */
2367             break;
2368         case 0x0400:
2369             /* EXCB */
2370             /* No-op.  */
2371             break;
2372         case 0x4000:
2373             /* MB */
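            /* Full barrier: order all earlier loads and stores before
               all later ones.  WMB below orders stores only.  */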
2374             tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2375             break;
2376         case 0x4400:
2377             /* WMB */
2378             tcg_gen_mb(TCG_MO_ST_ST | TCG_BAR_SC);
2379             break;
2380         case 0x8000:
2381             /* FETCH */
2382             /* No-op */
2383             break;
2384         case 0xA000:
2385             /* FETCH_M */
2386             /* No-op */
2387             break;
2388         case 0xC000:
2389             /* RPCC */
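            /* Under icount, reading the cycle counter counts as I/O:
               bracket the helper with gen_io_start/gen_io_end and end
               the TB (EXIT_PC_STALE) so accounting stays exact.  */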
2390             va = dest_gpr(ctx, ra);
2391             if (ctx->tb->cflags & CF_USE_ICOUNT) {
2392                 gen_io_start();
2393                 gen_helper_load_pcc(va, cpu_env);
2394                 gen_io_end();
2395                 ret = EXIT_PC_STALE;
2396             } else {
2397                 gen_helper_load_pcc(va, cpu_env);
2398             }
2399             break;
2400         case 0xE000:
2401             /* RC */
2402             gen_rx(ctx, ra, 0);
2403             break;
2404         case 0xE800:
2405             /* ECB */
2406             break;
2407         case 0xF000:
2408             /* RS */
2409             gen_rx(ctx, ra, 1);
2410             break;
2411         case 0xF800:
2412             /* WH64 */
2413             /* No-op */
2414             break;
2415         case 0xFC00:
2416             /* WH64EN */
2417             /* No-op */
2418             break;
2419         default:
2420             goto invalid_opc;
2421         }
2422         break;
2423 
2424     case 0x19:
2425         /* HW_MFPR (PALcode) */
2426 #ifndef CONFIG_USER_ONLY
2427         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2428         va = dest_gpr(ctx, ra);
2429         ret = gen_mfpr(ctx, va, insn & 0xffff);
2430         break;
2431 #else
2432         goto invalid_opc;
2433 #endif
2434 
2435     case 0x1A:
2436         /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
2437            prediction stack action, which of course we don't implement.  */
2438         vb = load_gpr(ctx, rb);
2439         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2440         if (ra != 31) {
2441             tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
2442         }
2443         ret = EXIT_PC_UPDATED;
2444         break;
2445 
2446     case 0x1B:
2447         /* HW_LD (PALcode) */
2448 #ifndef CONFIG_USER_ONLY
2449         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2450         {
2451             TCGv addr = tcg_temp_new();
2452             vb = load_gpr(ctx, rb);
2453             va = dest_gpr(ctx, ra);
2454 
2455             tcg_gen_addi_i64(addr, vb, disp12);
2456             switch ((insn >> 12) & 0xF) {
2457             case 0x0:
2458                 /* Longword physical access (hw_ldl/p) */
2459                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
2460                 break;
2461             case 0x1:
2462                 /* Quadword physical access (hw_ldq/p) */
2463                 tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEQ);
2464                 break;
2465             case 0x2:
2466                 /* Longword physical access with lock (hw_ldl_l/p) */
2467                 gen_qemu_ldl_l(va, addr, MMU_PHYS_IDX);
2468                 break;
2469             case 0x3:
2470                 /* Quadword physical access with lock (hw_ldq_l/p) */
2471                 gen_qemu_ldq_l(va, addr, MMU_PHYS_IDX);
2472                 break;
2473             case 0x4:
2474                 /* Longword virtual PTE fetch (hw_ldl/v) */
2475                 goto invalid_opc;
2476             case 0x5:
2477                 /* Quadword virtual PTE fetch (hw_ldq/v) */
2478                 goto invalid_opc;
2480             case 0x6:
2481                 /* Invalid */
2482                 goto invalid_opc;
2483             case 0x7:
2484                 /* Invalid */
2485                 goto invalid_opc;
2486             case 0x8:
2487                 /* Longword virtual access (hw_ldl) */
2488                 goto invalid_opc;
2489             case 0x9:
2490                 /* Quadword virtual access (hw_ldq) */
2491                 goto invalid_opc;
2492             case 0xA:
2493                 /* Longword virtual access with protection check (hw_ldl/w) */
2494                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
2495                 break;
2496             case 0xB:
2497                 /* Quadword virtual access with protection check (hw_ldq/w) */
2498                 tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
2499                 break;
2500             case 0xC:
2501                 /* Longword virtual access with alt access mode (hw_ldl/a) */
2502                 goto invalid_opc;
2503             case 0xD:
2504                 /* Quadword virtual access with alt access mode (hw_ldq/a) */
2505                 goto invalid_opc;
2506             case 0xE:
2507                 /* Longword virtual access with alternate access mode and
2508                    protection checks (hw_ldl/wa) */
2509                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
2510                 break;
2511             case 0xF:
2512                 /* Quadword virtual access with alternate access mode and
2513                    protection checks (hw_ldq/wa) */
2514                 tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
2515                 break;
2516             }
2517             tcg_temp_free(addr);
2518             break;
2519         }
2520 #else
2521         goto invalid_opc;
2522 #endif
2523 
2524     case 0x1C:
2525         vc = dest_gpr(ctx, rc);
2526         if (fn7 == 0x70) {
2527             /* FTOIT */
2528             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2529             REQUIRE_REG_31(rb);
2530             va = load_fpr(ctx, ra);
2531             tcg_gen_mov_i64(vc, va);
2532             break;
2533         } else if (fn7 == 0x78) {
2534             /* FTOIS */
2535             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
2536             REQUIRE_REG_31(rb);
2537             t32 = tcg_temp_new_i32();
2538             va = load_fpr(ctx, ra);
2539             gen_helper_s_to_memory(t32, va);
2540             tcg_gen_ext_i32_i64(vc, t32);
2541             tcg_temp_free_i32(t32);
2542             break;
2543         }
2544 
2545         vb = load_gpr_lit(ctx, rb, lit, islit);
2546         switch (fn7) {
2547         case 0x00:
2548             /* SEXTB */
2549             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
2550             REQUIRE_REG_31(ra);
2551             tcg_gen_ext8s_i64(vc, vb);
2552             break;
2553         case 0x01:
2554             /* SEXTW */
2555             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
2556             REQUIRE_REG_31(ra);
2557             tcg_gen_ext16s_i64(vc, vb);
2558             break;
2559         case 0x30:
2560             /* CTPOP */
2561             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2562             REQUIRE_REG_31(ra);
2563             REQUIRE_NO_LIT;
2564             tcg_gen_ctpop_i64(vc, vb);
2565             break;
2566         case 0x31:
2567             /* PERR */
2568             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2569             REQUIRE_NO_LIT;
2570             va = load_gpr(ctx, ra);
2571             gen_helper_perr(vc, va, vb);
2572             break;
2573         case 0x32:
2574             /* CTLZ */
2575             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2576             REQUIRE_REG_31(ra);
2577             REQUIRE_NO_LIT;
2578             tcg_gen_clzi_i64(vc, vb, 64);
2579             break;
2580         case 0x33:
2581             /* CTTZ */
2582             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
2583             REQUIRE_REG_31(ra);
2584             REQUIRE_NO_LIT;
2585             tcg_gen_ctzi_i64(vc, vb, 64);
2586             break;
2587         case 0x34:
2588             /* UNPKBW */
2589             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2590             REQUIRE_REG_31(ra);
2591             REQUIRE_NO_LIT;
2592             gen_helper_unpkbw(vc, vb);
2593             break;
2594         case 0x35:
2595             /* UNPKBL */
2596             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2597             REQUIRE_REG_31(ra);
2598             REQUIRE_NO_LIT;
2599             gen_helper_unpkbl(vc, vb);
2600             break;
2601         case 0x36:
2602             /* PKWB */
2603             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2604             REQUIRE_REG_31(ra);
2605             REQUIRE_NO_LIT;
2606             gen_helper_pkwb(vc, vb);
2607             break;
2608         case 0x37:
2609             /* PKLB */
2610             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2611             REQUIRE_REG_31(ra);
2612             REQUIRE_NO_LIT;
2613             gen_helper_pklb(vc, vb);
2614             break;
2615         case 0x38:
2616             /* MINSB8 */
2617             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2618             va = load_gpr(ctx, ra);
2619             gen_helper_minsb8(vc, va, vb);
2620             break;
2621         case 0x39:
2622             /* MINSW4 */
2623             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2624             va = load_gpr(ctx, ra);
2625             gen_helper_minsw4(vc, va, vb);
2626             break;
2627         case 0x3A:
2628             /* MINUB8 */
2629             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2630             va = load_gpr(ctx, ra);
2631             gen_helper_minub8(vc, va, vb);
2632             break;
2633         case 0x3B:
2634             /* MINUW4 */
2635             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2636             va = load_gpr(ctx, ra);
2637             gen_helper_minuw4(vc, va, vb);
2638             break;
2639         case 0x3C:
2640             /* MAXUB8 */
2641             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2642             va = load_gpr(ctx, ra);
2643             gen_helper_maxub8(vc, va, vb);
2644             break;
2645         case 0x3D:
2646             /* MAXUW4 */
2647             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2648             va = load_gpr(ctx, ra);
2649             gen_helper_maxuw4(vc, va, vb);
2650             break;
2651         case 0x3E:
2652             /* MAXSB8 */
2653             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2654             va = load_gpr(ctx, ra);
2655             gen_helper_maxsb8(vc, va, vb);
2656             break;
2657         case 0x3F:
2658             /* MAXSW4 */
2659             REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
2660             va = load_gpr(ctx, ra);
2661             gen_helper_maxsw4(vc, va, vb);
2662             break;
2663         default:
2664             goto invalid_opc;
2665         }
2666         break;
2667 
2668     case 0x1D:
2669         /* HW_MTPR (PALcode) */
2670 #ifndef CONFIG_USER_ONLY
2671         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2672         vb = load_gpr(ctx, rb);
2673         ret = gen_mtpr(ctx, vb, insn & 0xffff);
2674         break;
2675 #else
2676         goto invalid_opc;
2677 #endif
2678 
2679     case 0x1E:
2680         /* HW_RET (PALcode) */
2681 #ifndef CONFIG_USER_ONLY
2682         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2683         if (rb == 31) {
2684             /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
2685                address from EXC_ADDR.  This turns out to be useful for our
2686                emulation PALcode, so continue to accept it.  */
2687             ctx->lit = vb = tcg_temp_new();
2688             tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
2689         } else {
2690             vb = load_gpr(ctx, rb);
2691         }
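        /* Leaving PALcode: clear intr_flag, drop any lock in flight,
           copy bit 0 of the target into pal_mode, and mask the low
           two bits off the new PC.  */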
2692         tmp = tcg_temp_new();
2693         tcg_gen_movi_i64(tmp, 0);
2694         tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
2695         tcg_gen_movi_i64(cpu_lock_addr, -1);
2696         tcg_gen_andi_i64(tmp, vb, 1);
2697         tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
2698         tcg_gen_andi_i64(cpu_pc, vb, ~3);
2699         /* Allow interrupts to be recognized right away.  */
2700         ret = EXIT_PC_UPDATED_NOCHAIN;
2701         break;
2702 #else
2703         goto invalid_opc;
2704 #endif
2705 
2706     case 0x1F:
2707         /* HW_ST (PALcode) */
2708 #ifndef CONFIG_USER_ONLY
2709         REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
2710         {
2711             switch ((insn >> 12) & 0xF) {
2712             case 0x0:
2713                 /* Longword physical access */
2714                 va = load_gpr(ctx, ra);
2715                 vb = load_gpr(ctx, rb);
2716                 tmp = tcg_temp_new();
2717                 tcg_gen_addi_i64(tmp, vb, disp12);
2718                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
2719                 tcg_temp_free(tmp);
2720                 break;
2721             case 0x1:
2722                 /* Quadword physical access */
2723                 va = load_gpr(ctx, ra);
2724                 vb = load_gpr(ctx, rb);
2725                 tmp = tcg_temp_new();
2726                 tcg_gen_addi_i64(tmp, vb, disp12);
2727                 tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEQ);
2728                 tcg_temp_free(tmp);
2729                 break;
2730             case 0x2:
2731                 /* Longword physical access with lock */
2732                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2733                                             MMU_PHYS_IDX, MO_LESL);
2734                 break;
2735             case 0x3:
2736                 /* Quadword physical access with lock */
2737                 ret = gen_store_conditional(ctx, ra, rb, disp12,
2738                                             MMU_PHYS_IDX, MO_LEQ);
2739                 break;
2740             case 0x4:
2741                 /* Longword virtual access */
2742                 goto invalid_opc;
2743             case 0x5:
2744                 /* Quadword virtual access */
2745                 goto invalid_opc;
2746             case 0x6:
2747                 /* Invalid */
2748                 goto invalid_opc;
2749             case 0x7:
2750                 /* Invalid */
2751                 goto invalid_opc;
2752             case 0x8:
2753                 /* Invalid */
2754                 goto invalid_opc;
2755             case 0x9:
2756                 /* Invalid */
2757                 goto invalid_opc;
2758             case 0xA:
2759                 /* Invalid */
2760                 goto invalid_opc;
2761             case 0xB:
2762                 /* Invalid */
2763                 goto invalid_opc;
2764             case 0xC:
2765                 /* Longword virtual access with alternate access mode */
2766                 goto invalid_opc;
2767             case 0xD:
2768                 /* Quadword virtual access with alternate access mode */
2769                 goto invalid_opc;
2770             case 0xE:
2771                 /* Invalid */
2772                 goto invalid_opc;
2773             case 0xF:
2774                 /* Invalid */
2775                 goto invalid_opc;
2776             }
2777             break;
2778         }
2779 #else
2780         goto invalid_opc;
2781 #endif
2782     case 0x20:
2783         /* LDF */
2784         gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
2785         break;
2786     case 0x21:
2787         /* LDG */
2788         gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
2789         break;
2790     case 0x22:
2791         /* LDS */
2792         gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
2793         break;
2794     case 0x23:
2795         /* LDT */
2796         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
2797         break;
2798     case 0x24:
2799         /* STF */
2800         gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
2801         break;
2802     case 0x25:
2803         /* STG */
2804         gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
2805         break;
2806     case 0x26:
2807         /* STS */
2808         gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
2809         break;
2810     case 0x27:
2811         /* STT */
2812         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
2813         break;
2814     case 0x28:
2815         /* LDL */
2816         gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
2817         break;
2818     case 0x29:
2819         /* LDQ */
2820         gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
2821         break;
2822     case 0x2A:
2823         /* LDL_L */
2824         gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
2825         break;
2826     case 0x2B:
2827         /* LDQ_L */
2828         gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
2829         break;
2830     case 0x2C:
2831         /* STL */
2832         gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
2833         break;
2834     case 0x2D:
2835         /* STQ */
2836         gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
2837         break;
2838     case 0x2E:
2839         /* STL_C */
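        /* The conditional store succeeds only if the lock set up by a
           prior LDx_L is still intact; gen_store_conditional checks
           cpu_lock_addr and performs the store atomically against
           cpu_lock_value.  */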
2840         ret = gen_store_conditional(ctx, ra, rb, disp16,
2841                                     ctx->mem_idx, MO_LESL);
2842         break;
2843     case 0x2F:
2844         /* STQ_C */
2845         ret = gen_store_conditional(ctx, ra, rb, disp16,
2846                                     ctx->mem_idx, MO_LEQ);
2847         break;
2848     case 0x30:
2849         /* BR */
2850         ret = gen_bdirect(ctx, ra, disp21);
2851         break;
2852     case 0x31: /* FBEQ */
2853         ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
2854         break;
2855     case 0x32: /* FBLT */
2856         ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
2857         break;
2858     case 0x33: /* FBLE */
2859         ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
2860         break;
2861     case 0x34:
2862         /* BSR */
2863         ret = gen_bdirect(ctx, ra, disp21);
2864         break;
2865     case 0x35: /* FBNE */
2866         ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
2867         break;
2868     case 0x36: /* FBGE */
2869         ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
2870         break;
2871     case 0x37: /* FBGT */
2872         ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
2873         break;
2874     case 0x38:
2875         /* BLBC */
2876         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
2877         break;
2878     case 0x39:
2879         /* BEQ */
2880         ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
2881         break;
2882     case 0x3A:
2883         /* BLT */
2884         ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
2885         break;
2886     case 0x3B:
2887         /* BLE */
2888         ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
2889         break;
2890     case 0x3C:
2891         /* BLBS */
2892         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
2893         break;
2894     case 0x3D:
2895         /* BNE */
2896         ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
2897         break;
2898     case 0x3E:
2899         /* BGE */
2900         ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
2901         break;
2902     case 0x3F:
2903         /* BGT */
2904         ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
2905         break;
2906     invalid_opc:
2907         ret = gen_invalid(ctx);
2908         break;
2909     }
2910 
2911     return ret;
2912 }
2913 
2914 void gen_intermediate_code(CPUAlphaState *env, struct TranslationBlock *tb)
2915 {
2916     AlphaCPU *cpu = alpha_env_get_cpu(env);
2917     CPUState *cs = CPU(cpu);
2918     DisasContext ctx, *ctxp = &ctx;
2919     target_ulong pc_start;
2920     target_ulong pc_mask;
2921     uint32_t insn;
2922     ExitStatus ret;
2923     int num_insns;
2924     int max_insns;
2925 
2926     pc_start = tb->pc;
2927 
2928     ctx.tb = tb;
2929     ctx.pc = pc_start;
2930     ctx.mem_idx = cpu_mmu_index(env, false);
2931     ctx.implver = env->implver;
2932     ctx.singlestep_enabled = cs->singlestep_enabled;
2933 
2934 #ifdef CONFIG_USER_ONLY
2935     ctx.ir = cpu_std_ir;
2936 #else
2937     ctx.palbr = env->palbr;
2938     ctx.ir = (tb->flags & TB_FLAGS_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
2939 #endif
2940 
2941     /* ??? Every TB begins with an unset rounding mode, to be initialized on
2942        the first fp insn of the TB.  Alternatively we could define a proper
2943        default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
2944        to reset the FP_STATUS to that default at the end of any TB that
2945        changes the default.  We could even (gasp) dynamically figure out
2946        what default would be most efficient given the running program.  */
2947     ctx.tb_rm = -1;
2948     /* Similarly for flush-to-zero.  */
2949     ctx.tb_ftz = -1;
2950 
2951     TCGV_UNUSED_I64(ctx.zero);
2952     TCGV_UNUSED_I64(ctx.sink);
2953     TCGV_UNUSED_I64(ctx.lit);
2954 
2955     num_insns = 0;
2956     max_insns = tb->cflags & CF_COUNT_MASK;
2957     if (max_insns == 0) {
2958         max_insns = CF_COUNT_MASK;
2959     }
2960     if (max_insns > TCG_MAX_INSNS) {
2961         max_insns = TCG_MAX_INSNS;
2962     }
2963 
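    /* TBs normally stop at a page boundary, but a superpage is one
       huge linear mapping with uniform protection, so translation may
       continue across page boundaries within its 2^41-byte span.  */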
2964     if (in_superpage(&ctx, pc_start)) {
2965         pc_mask = (1ULL << 41) - 1;
2966     } else {
2967         pc_mask = ~TARGET_PAGE_MASK;
2968     }
2969 
2970     gen_tb_start(tb);
2971     do {
2972         tcg_gen_insn_start(ctx.pc);
2973         num_insns++;
2974 
2975         if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
2976             ret = gen_excp(&ctx, EXCP_DEBUG, 0);
2977             /* The address covered by the breakpoint must be included in
2978                [tb->pc, tb->pc + tb->size) in order for it to be
2979                properly cleared -- thus we increment the PC here so that
2980                the logic setting tb->size below does the right thing.  */
2981             ctx.pc += 4;
2982             break;
2983         }
2984         if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
2985             gen_io_start();
2986         }
2987         insn = cpu_ldl_code(env, ctx.pc);
2988 
2989         ctx.pc += 4;
2990         ret = translate_one(ctxp, insn);
2991         free_context_temps(ctxp);
2992 
2993         /* Stop generation if we reach a page boundary, fill the TCG
2994            op buffer, single-step, or exhaust the instruction count.  */
2995         if (ret == NO_EXIT
2996             && ((ctx.pc & pc_mask) == 0
2997                 || tcg_op_buf_full()
2998                 || num_insns >= max_insns
2999                 || singlestep
3000                 || ctx.singlestep_enabled)) {
3001             ret = EXIT_FALLTHRU;
3002         }
3003     } while (ret == NO_EXIT);
3004 
3005     if (tb->cflags & CF_LAST_IO) {
3006         gen_io_end();
3007     }
3008 
3009     switch (ret) {
3010     case EXIT_GOTO_TB:
3011     case EXIT_NORETURN:
3012         break;
3013     case EXIT_FALLTHRU:
3014         if (use_goto_tb(&ctx, ctx.pc)) {
3015             tcg_gen_goto_tb(0);
3016             tcg_gen_movi_i64(cpu_pc, ctx.pc);
3017             tcg_gen_exit_tb((uintptr_t)ctx.tb);
3018         }
3019         /* FALLTHRU */
3020     case EXIT_PC_STALE:
3021         tcg_gen_movi_i64(cpu_pc, ctx.pc);
3022         /* FALLTHRU */
3023     case EXIT_PC_UPDATED:
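        /* cpu_pc already holds the next PC.  When a hard exit isn't
           required, jump directly to the next TB via the in-TCG hash
           lookup instead of returning to the main loop.  */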
3024         if (!use_exit_tb(&ctx)) {
3025             tcg_gen_lookup_and_goto_ptr(cpu_pc);
3026             break;
3027         }
3028         /* FALLTHRU */
3029     case EXIT_PC_UPDATED_NOCHAIN:
3030         if (ctx.singlestep_enabled) {
3031             gen_excp_1(EXCP_DEBUG, 0);
3032         } else {
3033             tcg_gen_exit_tb(0);
3034         }
3035         break;
3036     default:
3037         g_assert_not_reached();
3038     }
3039 
3040     gen_tb_end(tb, num_insns);
3041 
3042     tb->size = ctx.pc - pc_start;
3043     tb->icount = num_insns;
3044 
3045 #ifdef DEBUG_DISAS
3046     if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
3047         && qemu_log_in_addr_range(pc_start)) {
3048         qemu_log_lock();
3049         qemu_log("IN: %s\n", lookup_symbol(pc_start));
3050         log_target_disas(cs, pc_start, ctx.pc - pc_start, 1);
3051         qemu_log("\n");
3052         qemu_log_unlock();
3053     }
3054 #endif
3055 }
3056 
3057 void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
3058                           target_ulong *data)
3059 {
3060     env->pc = data[0];
3061 }
3062