xref: /openbmc/qemu/target/ppc/translate.c (revision 7c717367)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 #include "power8-pmu.h"
40 
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43 
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 
47 /* Include definitions for instruction classes and implementation flags */
48 /* #define PPC_DEBUG_DISAS */
49 
50 #ifdef PPC_DEBUG_DISAS
51 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
52 #else
53 #  define LOG_DISAS(...) do { } while (0)
54 #endif
55 /*****************************************************************************/
56 /* Code translation helpers                                                  */
57 
58 /* global register indexes */
59 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
60                           + 10 * 4 + 22 * 5 /* SPE GPRh */
61                           + 8 * 5           /* CRF */];
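/*
 * Note: names are stored NUL-terminated, so "r0".."r9" need 3 bytes and
 * "r10".."r31" need 4, matching the p/cpu_reg_names_size bookkeeping in
 * ppc_translate_init() below; the SPE "rNH" and "crfN" names are counted
 * the same way.
 */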
62 static TCGv cpu_gpr[32];
63 static TCGv cpu_gprh[32];
64 static TCGv_i32 cpu_crf[8];
65 static TCGv cpu_nip;
66 static TCGv cpu_msr;
67 static TCGv cpu_ctr;
68 static TCGv cpu_lr;
69 #if defined(TARGET_PPC64)
70 static TCGv cpu_cfar;
71 #endif
72 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
73 static TCGv cpu_reserve;
74 static TCGv cpu_reserve_val;
75 static TCGv cpu_reserve_val2;
76 static TCGv cpu_fpscr;
77 static TCGv_i32 cpu_access_type;
78 
79 #include "exec/gen-icount.h"
80 
81 void ppc_translate_init(void)
82 {
83     int i;
84     char *p;
85     size_t cpu_reg_names_size;
86 
87     p = cpu_reg_names;
88     cpu_reg_names_size = sizeof(cpu_reg_names);
89 
90     for (i = 0; i < 8; i++) {
91         snprintf(p, cpu_reg_names_size, "crf%d", i);
92         cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
93                                             offsetof(CPUPPCState, crf[i]), p);
94         p += 5;
95         cpu_reg_names_size -= 5;
96     }
97 
98     for (i = 0; i < 32; i++) {
99         snprintf(p, cpu_reg_names_size, "r%d", i);
100         cpu_gpr[i] = tcg_global_mem_new(cpu_env,
101                                         offsetof(CPUPPCState, gpr[i]), p);
102         p += (i < 10) ? 3 : 4;
103         cpu_reg_names_size -= (i < 10) ? 3 : 4;
104         snprintf(p, cpu_reg_names_size, "r%dH", i);
105         cpu_gprh[i] = tcg_global_mem_new(cpu_env,
106                                          offsetof(CPUPPCState, gprh[i]), p);
107         p += (i < 10) ? 4 : 5;
108         cpu_reg_names_size -= (i < 10) ? 4 : 5;
109     }
110 
111     cpu_nip = tcg_global_mem_new(cpu_env,
112                                  offsetof(CPUPPCState, nip), "nip");
113 
114     cpu_msr = tcg_global_mem_new(cpu_env,
115                                  offsetof(CPUPPCState, msr), "msr");
116 
117     cpu_ctr = tcg_global_mem_new(cpu_env,
118                                  offsetof(CPUPPCState, ctr), "ctr");
119 
120     cpu_lr = tcg_global_mem_new(cpu_env,
121                                 offsetof(CPUPPCState, lr), "lr");
122 
123 #if defined(TARGET_PPC64)
124     cpu_cfar = tcg_global_mem_new(cpu_env,
125                                   offsetof(CPUPPCState, cfar), "cfar");
126 #endif
127 
128     cpu_xer = tcg_global_mem_new(cpu_env,
129                                  offsetof(CPUPPCState, xer), "xer");
130     cpu_so = tcg_global_mem_new(cpu_env,
131                                 offsetof(CPUPPCState, so), "SO");
132     cpu_ov = tcg_global_mem_new(cpu_env,
133                                 offsetof(CPUPPCState, ov), "OV");
134     cpu_ca = tcg_global_mem_new(cpu_env,
135                                 offsetof(CPUPPCState, ca), "CA");
136     cpu_ov32 = tcg_global_mem_new(cpu_env,
137                                   offsetof(CPUPPCState, ov32), "OV32");
138     cpu_ca32 = tcg_global_mem_new(cpu_env,
139                                   offsetof(CPUPPCState, ca32), "CA32");
140 
141     cpu_reserve = tcg_global_mem_new(cpu_env,
142                                      offsetof(CPUPPCState, reserve_addr),
143                                      "reserve_addr");
144     cpu_reserve_val = tcg_global_mem_new(cpu_env,
145                                          offsetof(CPUPPCState, reserve_val),
146                                          "reserve_val");
147     cpu_reserve_val2 = tcg_global_mem_new(cpu_env,
148                                           offsetof(CPUPPCState, reserve_val2),
149                                           "reserve_val2");
150 
151     cpu_fpscr = tcg_global_mem_new(cpu_env,
152                                    offsetof(CPUPPCState, fpscr), "fpscr");
153 
154     cpu_access_type = tcg_global_mem_new_i32(cpu_env,
155                                              offsetof(CPUPPCState, access_type),
156                                              "access_type");
157 }
158 
159 /* internal defines */
160 struct DisasContext {
161     DisasContextBase base;
162     target_ulong cia;  /* current instruction address */
163     uint32_t opcode;
164     /* MSR bits affecting how memory is accessed */
165     bool pr, hv, dr, le_mode;
166     bool lazy_tlb_flush;
167     bool need_access_type;
168     int mem_idx;
169     int access_type;
170     /* Translation flags */
171     MemOp default_tcg_memop_mask;
172 #if defined(TARGET_PPC64)
173     bool sf_mode;
174     bool has_cfar;
175 #endif
176     bool fpu_enabled;
177     bool altivec_enabled;
178     bool vsx_enabled;
179     bool spe_enabled;
180     bool tm_enabled;
181     bool gtse;
182     bool hr;
183     bool mmcr0_pmcc0;
184     bool mmcr0_pmcc1;
185     bool mmcr0_pmcjce;
186     bool pmc_other;
187     bool pmu_insn_cnt;
188     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
189     int singlestep_enabled;
190     uint32_t flags;
191     uint64_t insns_flags;
192     uint64_t insns_flags2;
193 };
194 
195 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
196 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
197 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
198 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
199 
200 /* Return true iff byteswap is needed in a scalar memop */
201 static inline bool need_byteswap(const DisasContext *ctx)
202 {
203 #if TARGET_BIG_ENDIAN
204      return ctx->le_mode;
205 #else
206      return !ctx->le_mode;
207 #endif
208 }
209 
210 /* True when active word size < size of target_long.  */
211 #ifdef TARGET_PPC64
212 # define NARROW_MODE(C)  (!(C)->sf_mode)
213 #else
214 # define NARROW_MODE(C)  0
215 #endif
216 
217 struct opc_handler_t {
218     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
219     uint32_t inval1;
220     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
221     uint32_t inval2;
222     /* instruction type */
223     uint64_t type;
224     /* extended instruction type */
225     uint64_t type2;
226     /* handler */
227     void (*handler)(DisasContext *ctx);
228 };
229 
230 /* SPR load/store helpers */
231 static inline void gen_load_spr(TCGv t, int reg)
232 {
233     tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
234 }
235 
236 static inline void gen_store_spr(int reg, TCGv t)
237 {
238     tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
239 }
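
/*
 * Illustrative read-modify-write pattern with these helpers (this is how
 * callers such as gen_prep_dbgex() below use them):
 *
 *     TCGv t0 = tcg_temp_new();
 *     gen_load_spr(t0, sprn);         // fetch the architected SPR value
 *     tcg_gen_ori_tl(t0, t0, bits);   // modify it
 *     gen_store_spr(sprn, t0);        // write it back to env->spr[sprn]
 *     tcg_temp_free(t0);
 */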
240 
241 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
242 {
243     if (ctx->need_access_type && ctx->access_type != access_type) {
244         tcg_gen_movi_i32(cpu_access_type, access_type);
245         ctx->access_type = access_type;
246     }
247 }
248 
249 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
250 {
251     if (NARROW_MODE(ctx)) {
252         nip = (uint32_t)nip;
253     }
254     tcg_gen_movi_tl(cpu_nip, nip);
255 }
256 
257 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
258 {
259     TCGv_i32 t0, t1;
260 
261     /*
262      * These are all synchronous exceptions; we set the PC back to the
263      * faulting instruction.
264      */
265     gen_update_nip(ctx, ctx->cia);
266     t0 = tcg_const_i32(excp);
267     t1 = tcg_const_i32(error);
268     gen_helper_raise_exception_err(cpu_env, t0, t1);
269     tcg_temp_free_i32(t0);
270     tcg_temp_free_i32(t1);
271     ctx->base.is_jmp = DISAS_NORETURN;
272 }
273 
274 static void gen_exception(DisasContext *ctx, uint32_t excp)
275 {
276     TCGv_i32 t0;
277 
278     /*
279      * These are all synchronous exceptions; we set the PC back to the
280      * faulting instruction.
281      */
282     gen_update_nip(ctx, ctx->cia);
283     t0 = tcg_const_i32(excp);
284     gen_helper_raise_exception(cpu_env, t0);
285     tcg_temp_free_i32(t0);
286     ctx->base.is_jmp = DISAS_NORETURN;
287 }
288 
289 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
290                               target_ulong nip)
291 {
292     TCGv_i32 t0;
293 
294     gen_update_nip(ctx, nip);
295     t0 = tcg_const_i32(excp);
296     gen_helper_raise_exception(cpu_env, t0);
297     tcg_temp_free_i32(t0);
298     ctx->base.is_jmp = DISAS_NORETURN;
299 }
300 
301 static void gen_icount_io_start(DisasContext *ctx)
302 {
303     if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
304         gen_io_start();
305         /*
306          * An I/O instruction must be last in the TB.
307          * Chain to the next TB, and let the code from gen_tb_start
308          * decide if we need to return to the main loop.
309          * Doing this first also allows this value to be overridden.
310          */
311         ctx->base.is_jmp = DISAS_TOO_MANY;
312     }
313 }
314 
315 #if !defined(CONFIG_USER_ONLY)
316 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
317 {
318     gen_icount_io_start(ctx);
319     gen_helper_ppc_maybe_interrupt(cpu_env);
320 }
321 #endif
322 
323 /*
324  * Tells the caller which exception to generate and prepares the SPR
325  * registers for that exception.
326  *
327  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
328  * POWERPC_EXCP_DEBUG (on BookE).
329  */
330 static uint32_t gen_prep_dbgex(DisasContext *ctx)
331 {
332     if (ctx->flags & POWERPC_FLAG_DE) {
333         target_ulong dbsr = 0;
334         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
335             dbsr = DBCR0_ICMP;
336         } else {
337             /* Must have been a branch */
338             dbsr = DBCR0_BRT;
339         }
340         TCGv t0 = tcg_temp_new();
341         gen_load_spr(t0, SPR_BOOKE_DBSR);
342         tcg_gen_ori_tl(t0, t0, dbsr);
343         gen_store_spr(SPR_BOOKE_DBSR, t0);
344         tcg_temp_free(t0);
345         return POWERPC_EXCP_DEBUG;
346     } else {
347         return POWERPC_EXCP_TRACE;
348     }
349 }
350 
351 static void gen_debug_exception(DisasContext *ctx)
352 {
353     gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
354     ctx->base.is_jmp = DISAS_NORETURN;
355 }
356 
357 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
358 {
359     /* Will be converted to program check if needed */
360     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
361 }
362 
363 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
364 {
365     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
366 }
367 
368 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
369 {
370     /* Will be converted to program check if needed */
371     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
372 }
373 
374 /*****************************************************************************/
375 /* SPR READ/WRITE CALLBACKS */
376 
377 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
378 {
379 #if 0
380     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
381     printf("ERROR: try to access SPR %d !\n", sprn);
382 #endif
383 }
384 
385 /* #define PPC_DUMP_SPR_ACCESSES */
386 
387 /*
388  * Generic callbacks:
389  * do nothing but store/retrieve spr value
390  */
391 static void spr_load_dump_spr(int sprn)
392 {
393 #ifdef PPC_DUMP_SPR_ACCESSES
394     TCGv_i32 t0 = tcg_const_i32(sprn);
395     gen_helper_load_dump_spr(cpu_env, t0);
396     tcg_temp_free_i32(t0);
397 #endif
398 }
399 
400 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
401 {
402     gen_load_spr(cpu_gpr[gprn], sprn);
403     spr_load_dump_spr(sprn);
404 }
405 
406 static void spr_store_dump_spr(int sprn)
407 {
408 #ifdef PPC_DUMP_SPR_ACCESSES
409     TCGv_i32 t0 = tcg_const_i32(sprn);
410     gen_helper_store_dump_spr(cpu_env, t0);
411     tcg_temp_free_i32(t0);
412 #endif
413 }
414 
415 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
416 {
417     gen_store_spr(sprn, cpu_gpr[gprn]);
418     spr_store_dump_spr(sprn);
419 }
420 
421 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
422 {
423     spr_write_generic(ctx, sprn, gprn);
424 
425     /*
426      * SPR_CTRL writes must force a new translation block,
427      * allowing the PMU to account for run latch events more
428      * accurately.
429      */
430     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
431 }
432 
433 #if !defined(CONFIG_USER_ONLY)
434 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
435 {
436 #ifdef TARGET_PPC64
437     TCGv t0 = tcg_temp_new();
438     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
439     gen_store_spr(sprn, t0);
440     tcg_temp_free(t0);
441     spr_store_dump_spr(sprn);
442 #else
443     spr_write_generic(ctx, sprn, gprn);
444 #endif
445 }
446 
447 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
448 {
449     TCGv t0 = tcg_temp_new();
450     TCGv t1 = tcg_temp_new();
451     gen_load_spr(t0, sprn);
452     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
453     tcg_gen_and_tl(t0, t0, t1);
454     gen_store_spr(sprn, t0);
455     tcg_temp_free(t0);
456     tcg_temp_free(t1);
457 }
458 
459 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
460 {
461 }
462 
463 #endif
464 
465 /* SPR common to all PowerPC */
466 /* XER */
467 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
468 {
469     TCGv dst = cpu_gpr[gprn];
470     TCGv t0 = tcg_temp_new();
471     TCGv t1 = tcg_temp_new();
472     TCGv t2 = tcg_temp_new();
473     tcg_gen_mov_tl(dst, cpu_xer);
474     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
475     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
476     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
477     tcg_gen_or_tl(t0, t0, t1);
478     tcg_gen_or_tl(dst, dst, t2);
479     tcg_gen_or_tl(dst, dst, t0);
480     if (is_isa300(ctx)) {
481         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
482         tcg_gen_or_tl(dst, dst, t0);
483         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
484         tcg_gen_or_tl(dst, dst, t0);
485     }
486     tcg_temp_free(t0);
487     tcg_temp_free(t1);
488     tcg_temp_free(t2);
489 }
490 
491 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
492 {
493     TCGv src = cpu_gpr[gprn];
494     /* Write all flags unconditionally; only reads check for isa300 */
495     tcg_gen_andi_tl(cpu_xer, src,
496                     ~((1u << XER_SO) |
497                       (1u << XER_OV) | (1u << XER_OV32) |
498                       (1u << XER_CA) | (1u << XER_CA32)));
499     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
500     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
501     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
502     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
503     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
504 }
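
/*
 * Note that SO, OV, CA (and OV32/CA32 on ISA v3.00) are kept in dedicated
 * TCG globals rather than inside cpu_xer, so reads reassemble the
 * architected XER image and writes scatter the bits back out, as above.
 */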
505 
506 /* LR */
507 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
508 {
509     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
510 }
511 
512 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
513 {
514     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
515 }
516 
517 /* CFAR */
518 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
519 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
520 {
521     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
522 }
523 
524 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
525 {
526     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
527 }
528 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
529 
530 /* CTR */
531 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
532 {
533     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
534 }
535 
536 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
537 {
538     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
539 }
540 
541 /* User read access to SPR */
542 /* USPRx */
543 /* UMMCRx */
544 /* UPMCx */
545 /* USIA */
546 /* UDECR */
547 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
548 {
549     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
550 }
551 
552 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
553 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
554 {
555     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
556 }
557 #endif
558 
559 /* SPR common to all non-embedded PowerPC */
560 /* DECR */
561 #if !defined(CONFIG_USER_ONLY)
562 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
563 {
564     gen_icount_io_start(ctx);
565     gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
566 }
567 
568 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
569 {
570     gen_icount_io_start(ctx);
571     gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
572 }
573 #endif
574 
575 /* SPR common to all non-embedded PowerPC, except 601 */
576 /* Time base */
577 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
578 {
579     gen_icount_io_start(ctx);
580     gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
581 }
582 
583 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
584 {
585     gen_icount_io_start(ctx);
586     gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
587 }
588 
589 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
590 {
591     gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
592 }
593 
594 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
595 {
596     gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
597 }
598 
599 #if !defined(CONFIG_USER_ONLY)
600 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
601 {
602     gen_icount_io_start(ctx);
603     gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
604 }
605 
606 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
607 {
608     gen_icount_io_start(ctx);
609     gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
610 }
611 
612 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
613 {
614     gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
615 }
616 
617 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
618 {
619     gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
620 }
621 
622 #if defined(TARGET_PPC64)
623 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
624 {
625     gen_icount_io_start(ctx);
626     gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
627 }
628 
629 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
630 {
631     gen_icount_io_start(ctx);
632     gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
633 }
634 
635 /* HDECR */
636 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
637 {
638     gen_icount_io_start(ctx);
639     gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
640 }
641 
642 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
643 {
644     gen_icount_io_start(ctx);
645     gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
646 }
647 
648 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
649 {
650     gen_icount_io_start(ctx);
651     gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
652 }
653 
654 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
655 {
656     gen_icount_io_start(ctx);
657     gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
658 }
659 
660 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
661 {
662     gen_icount_io_start(ctx);
663     gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
664 }
665 
666 #endif
667 #endif
668 
669 #if !defined(CONFIG_USER_ONLY)
670 /* IBAT0U...IBAT7U */
671 /* IBAT0L...IBAT7L */
672 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
673 {
674     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
675                   offsetof(CPUPPCState,
676                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
677 }
678 
679 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
680 {
681     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
682                   offsetof(CPUPPCState,
683                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
684 }
685 
686 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
687 {
688     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
689     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
690     tcg_temp_free_i32(t0);
691 }
692 
693 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
694 {
695     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
696     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
697     tcg_temp_free_i32(t0);
698 }
699 
700 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
701 {
702     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
703     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
704     tcg_temp_free_i32(t0);
705 }
706 
707 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
708 {
709     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
710     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
711     tcg_temp_free_i32(t0);
712 }
713 
714 /* DBAT0U...DBAT7U */
715 /* DBAT0L...DBAT7L */
716 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
717 {
718     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
719                   offsetof(CPUPPCState,
720                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
721 }
722 
723 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
724 {
725     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
726                   offsetof(CPUPPCState,
727                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
728 }
729 
730 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
731 {
732     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
733     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
734     tcg_temp_free_i32(t0);
735 }
736 
737 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
738 {
739     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
740     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
741     tcg_temp_free_i32(t0);
742 }
743 
744 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
745 {
746     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
747     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
748     tcg_temp_free_i32(t0);
749 }
750 
751 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
752 {
753     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
754     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
755     tcg_temp_free_i32(t0);
756 }
757 
758 /* SDR1 */
759 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
760 {
761     gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
762 }
763 
764 #if defined(TARGET_PPC64)
765 /* 64-bit PowerPC specific SPRs */
766 /* PIDR */
767 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
768 {
769     gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
770 }
771 
772 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
773 {
774     gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
775 }
776 
777 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
778 {
779     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
780 }
781 
782 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
783 {
784     TCGv t0 = tcg_temp_new();
785     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
786     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
787     tcg_temp_free(t0);
788 }

789 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
790 {
791     gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
792 }
793 
794 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
795 {
796     gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
797 }
798 
799 /* DPDES */
800 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
801 {
802     gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
803 }
804 
805 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
806 {
807     gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
808 }
809 #endif
810 #endif
811 
812 /* PowerPC 40x specific registers */
813 #if !defined(CONFIG_USER_ONLY)
814 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
815 {
816     gen_icount_io_start(ctx);
817     gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
818 }
819 
820 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
821 {
822     gen_icount_io_start(ctx);
823     gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
824 }
825 
826 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
827 {
828     gen_icount_io_start(ctx);
829     gen_store_spr(sprn, cpu_gpr[gprn]);
830     gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
831     /* We must stop translation as we may have rebooted */
832     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
833 }
834 
835 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
836 {
837     gen_icount_io_start(ctx);
838     gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
839 }
840 
841 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
842 {
843     gen_icount_io_start(ctx);
844     gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
845 }
846 
847 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
848 {
849     gen_icount_io_start(ctx);
850     gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
851 }
852 
853 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
854 {
855     TCGv t0 = tcg_temp_new();
856     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
857     gen_helper_store_40x_pid(cpu_env, t0);
858     tcg_temp_free(t0);
859 }
860 
861 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
862 {
863     gen_icount_io_start(ctx);
864     gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
865 }
866 
867 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
868 {
869     gen_icount_io_start(ctx);
870     gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
871 }
872 #endif
873 
874 /* PIR */
875 #if !defined(CONFIG_USER_ONLY)
876 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
877 {
878     TCGv t0 = tcg_temp_new();
879     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
880     gen_store_spr(SPR_PIR, t0);
881     tcg_temp_free(t0);
882 }
883 #endif
884 
885 /* SPE specific registers */
886 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
887 {
888     TCGv_i32 t0 = tcg_temp_new_i32();
889     tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
890     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
891     tcg_temp_free_i32(t0);
892 }
893 
894 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
895 {
896     TCGv_i32 t0 = tcg_temp_new_i32();
897     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
898     tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
899     tcg_temp_free_i32(t0);
900 }
901 
902 #if !defined(CONFIG_USER_ONLY)
903 /* Callback used to write the exception vector base */
904 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
905 {
906     TCGv t0 = tcg_temp_new();
907     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
908     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
909     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
910     gen_store_spr(sprn, t0);
911     tcg_temp_free(t0);
912 }
913 
914 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
915 {
916     int sprn_offs;
917 
918     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
919         sprn_offs = sprn - SPR_BOOKE_IVOR0;
920     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
921         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
922     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
923         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
924     } else {
925         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
926                       " vector 0x%03x\n", sprn);
927         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
928         return;
929     }
930 
931     TCGv t0 = tcg_temp_new();
932     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
933     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
934     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
935     gen_store_spr(sprn, t0);
936     tcg_temp_free(t0);
937 }
938 #endif
939 
940 #ifdef TARGET_PPC64
941 #ifndef CONFIG_USER_ONLY
942 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
943 {
944     TCGv t0 = tcg_temp_new();
945     TCGv t1 = tcg_temp_new();
946     TCGv t2 = tcg_temp_new();
947 
948     /*
949      * Note, the HV=1 PR=0 case is handled earlier by simply using
950      * spr_write_generic for HV mode in the SPR table
951      */
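    /* Net effect: AMR = (AMR & ~mask) | (GPR & mask), mask = UAMOR or AMOR */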
952 
953     /* Build insertion mask into t1 based on context */
954     if (ctx->pr) {
955         gen_load_spr(t1, SPR_UAMOR);
956     } else {
957         gen_load_spr(t1, SPR_AMOR);
958     }
959 
960     /* Mask new bits into t2 */
961     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
962 
963     /* Load AMR and clear new bits in t0 */
964     gen_load_spr(t0, SPR_AMR);
965     tcg_gen_andc_tl(t0, t0, t1);
966 
967     /* OR in the new bits and write it out */
968     tcg_gen_or_tl(t0, t0, t2);
969     gen_store_spr(SPR_AMR, t0);
970     spr_store_dump_spr(SPR_AMR);
971 
972     tcg_temp_free(t0);
973     tcg_temp_free(t1);
974     tcg_temp_free(t2);
975 }
976 
977 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
978 {
979     TCGv t0 = tcg_temp_new();
980     TCGv t1 = tcg_temp_new();
981     TCGv t2 = tcg_temp_new();
982 
983     /*
984      * Note, the HV=1 case is handled earlier by simply using
985      * spr_write_generic for HV mode in the SPR table
986      */
987 
988     /* Build insertion mask into t1 based on context */
989     gen_load_spr(t1, SPR_AMOR);
990 
991     /* Mask new bits into t2 */
992     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
993 
994     /* Load AMR and clear new bits in t0 */
995     gen_load_spr(t0, SPR_UAMOR);
996     tcg_gen_andc_tl(t0, t0, t1);
997 
998     /* OR in the new bits and write it out */
999     tcg_gen_or_tl(t0, t0, t2);
1000     gen_store_spr(SPR_UAMOR, t0);
1001     spr_store_dump_spr(SPR_UAMOR);
1002 
1003     tcg_temp_free(t0);
1004     tcg_temp_free(t1);
1005     tcg_temp_free(t2);
1006 }
1007 
1008 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1009 {
1010     TCGv t0 = tcg_temp_new();
1011     TCGv t1 = tcg_temp_new();
1012     TCGv t2 = tcg_temp_new();
1013 
1014     /*
1015      * Note, the HV=1 case is handled earlier by simply using
1016      * spr_write_generic for HV mode in the SPR table
1017      */
1018 
1019     /* Build insertion mask into t1 based on context */
1020     gen_load_spr(t1, SPR_AMOR);
1021 
1022     /* Mask new bits into t2 */
1023     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1024 
1025     /* Load AMR and clear new bits in t0 */
1026     gen_load_spr(t0, SPR_IAMR);
1027     tcg_gen_andc_tl(t0, t0, t1);
1028 
1029     /* OR in the new bits and write it out */
1030     tcg_gen_or_tl(t0, t0, t2);
1031     gen_store_spr(SPR_IAMR, t0);
1032     spr_store_dump_spr(SPR_IAMR);
1033 
1034     tcg_temp_free(t0);
1035     tcg_temp_free(t1);
1036     tcg_temp_free(t2);
1037 }
1038 #endif
1039 #endif
1040 
1041 #ifndef CONFIG_USER_ONLY
1042 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1043 {
1044     gen_helper_fixup_thrm(cpu_env);
1045     gen_load_spr(cpu_gpr[gprn], sprn);
1046     spr_load_dump_spr(sprn);
1047 }
1048 #endif /* !CONFIG_USER_ONLY */
1049 
1050 #if !defined(CONFIG_USER_ONLY)
1051 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1052 {
1053     TCGv t0 = tcg_temp_new();
1054 
1055     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1056     gen_store_spr(sprn, t0);
1057     tcg_temp_free(t0);
1058 }
1059 
1060 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1061 {
1062     TCGv t0 = tcg_temp_new();
1063 
1064     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1065     gen_store_spr(sprn, t0);
1066     tcg_temp_free(t0);
1067 }
1068 
1069 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1070 {
1071     TCGv t0 = tcg_temp_new();
1072 
1073     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1074                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1075     gen_store_spr(sprn, t0);
1076     tcg_temp_free(t0);
1077 }
1078 
1079 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1080 {
1081     gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1082 }
1083 
1084 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1085 {
1086     TCGv_i32 t0 = tcg_const_i32(sprn);
1087     gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1088     tcg_temp_free_i32(t0);
1089 }

1090 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1091 {
1092     gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1093 }

1094 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1095 {
1096     gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1097 }
1098 
1099 #endif
1100 
1101 #if !defined(CONFIG_USER_ONLY)
1102 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1103 {
1104     TCGv val = tcg_temp_new();
1105     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1106     gen_store_spr(SPR_BOOKE_MAS3, val);
1107     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1108     gen_store_spr(SPR_BOOKE_MAS7, val);
1109     tcg_temp_free(val);
1110 }
1111 
1112 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1113 {
1114     TCGv mas7 = tcg_temp_new();
1115     TCGv mas3 = tcg_temp_new();
1116     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1117     tcg_gen_shli_tl(mas7, mas7, 32);
1118     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1119     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1120     tcg_temp_free(mas3);
1121     tcg_temp_free(mas7);
1122 }
1123 
1124 #endif
1125 
1126 #ifdef TARGET_PPC64
1127 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1128                                     int bit, int sprn, int cause)
1129 {
1130     TCGv_i32 t1 = tcg_const_i32(bit);
1131     TCGv_i32 t2 = tcg_const_i32(sprn);
1132     TCGv_i32 t3 = tcg_const_i32(cause);
1133 
1134     gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1135 
1136     tcg_temp_free_i32(t3);
1137     tcg_temp_free_i32(t2);
1138     tcg_temp_free_i32(t1);
1139 }
1140 
1141 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1142                                    int bit, int sprn, int cause)
1143 {
1144     TCGv_i32 t1 = tcg_const_i32(bit);
1145     TCGv_i32 t2 = tcg_const_i32(sprn);
1146     TCGv_i32 t3 = tcg_const_i32(cause);
1147 
1148     gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1149 
1150     tcg_temp_free_i32(t3);
1151     tcg_temp_free_i32(t2);
1152     tcg_temp_free_i32(t1);
1153 }
1154 
1155 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1156 {
1157     TCGv spr_up = tcg_temp_new();
1158     TCGv spr = tcg_temp_new();
1159 
1160     gen_load_spr(spr, sprn - 1);
1161     tcg_gen_shri_tl(spr_up, spr, 32);
1162     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1163 
1164     tcg_temp_free(spr);
1165     tcg_temp_free(spr_up);
1166 }
1167 
1168 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1169 {
1170     TCGv spr = tcg_temp_new();
1171 
1172     gen_load_spr(spr, sprn - 1);
1173     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1174     gen_store_spr(sprn - 1, spr);
1175 
1176     tcg_temp_free(spr);
1177 }
1178 
1179 #if !defined(CONFIG_USER_ONLY)
1180 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1181 {
1182     TCGv hmer = tcg_temp_new();
1183 
1184     gen_load_spr(hmer, sprn);
1185     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1186     gen_store_spr(sprn, hmer);
1187     spr_store_dump_spr(sprn);
1188     tcg_temp_free(hmer);
1189 }
1190 
1191 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1192 {
1193     gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1194 }
1195 #endif /* !defined(CONFIG_USER_ONLY) */
1196 
1197 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1198 {
1199     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1200     spr_read_generic(ctx, gprn, sprn);
1201 }
1202 
1203 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1204 {
1205     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1206     spr_write_generic(ctx, sprn, gprn);
1207 }
1208 
1209 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1210 {
1211     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1212     spr_read_generic(ctx, gprn, sprn);
1213 }
1214 
1215 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1216 {
1217     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1218     spr_write_generic(ctx, sprn, gprn);
1219 }
1220 
1221 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1222 {
1223     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1224     spr_read_prev_upper32(ctx, gprn, sprn);
1225 }
1226 
1227 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1228 {
1229     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1230     spr_write_prev_upper32(ctx, sprn, gprn);
1231 }
1232 
1233 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1234 {
1235     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1236     spr_read_generic(ctx, gprn, sprn);
1237 }
1238 
1239 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1240 {
1241     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1242     spr_write_generic(ctx, sprn, gprn);
1243 }
1244 
1245 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1246 {
1247     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1248     spr_read_prev_upper32(ctx, gprn, sprn);
1249 }
1250 
1251 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1252 {
1253     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1254     spr_write_prev_upper32(ctx, sprn, gprn);
1255 }
1256 
1257 void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1258 {
1259     TCGv t0 = tcg_temp_new();
1260 
1261     /*
1262      * Problem state accesses the (H)DEXCR through separate SPR indexes
1263      * that are 16 below the SPR indexes giving full access to the
1264      * (H)DEXCR in privileged state. Problem state can only read bits
1265      * 32:63; bits 0:31 return 0.
1266      *
1267      * See section 9.3.1-9.3.2 of PowerISA v3.1B
1268      */
1269 
1270     gen_load_spr(t0, sprn + 16);
1271     tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1272 
1273     tcg_temp_free(t0);
1274 }
1275 #endif
1276 
1277 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1278 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1279 
1280 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1281 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1282 
1283 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1284 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1285 
1286 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1287 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1288 
1289 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1290 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1291 
1292 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1293 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1294 
1295 typedef struct opcode_t {
1296     unsigned char opc1, opc2, opc3, opc4;
1297 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1298     unsigned char pad[4];
1299 #endif
1300     opc_handler_t handler;
1301     const char *oname;
1302 } opcode_t;
1303 
1304 static void gen_priv_opc(DisasContext *ctx)
1305 {
1306     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1307 }
1308 
1309 /* Helpers for priv. check */
1310 #define GEN_PRIV(CTX)              \
1311     do {                           \
1312         gen_priv_opc(CTX); return; \
1313     } while (0)
1314 
1315 #if defined(CONFIG_USER_ONLY)
1316 #define CHK_HV(CTX) GEN_PRIV(CTX)
1317 #define CHK_SV(CTX) GEN_PRIV(CTX)
1318 #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1319 #else
1320 #define CHK_HV(CTX)                         \
1321     do {                                    \
1322         if (unlikely(ctx->pr || !ctx->hv)) {\
1323             GEN_PRIV(CTX);                  \
1324         }                                   \
1325     } while (0)
1326 #define CHK_SV(CTX)              \
1327     do {                         \
1328         if (unlikely(ctx->pr)) { \
1329             GEN_PRIV(CTX);       \
1330         }                        \
1331     } while (0)
1332 #define CHK_HVRM(CTX)                                   \
1333     do {                                                \
1334         if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1335             GEN_PRIV(CTX);                              \
1336         }                                               \
1337     } while (0)
1338 #endif
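
/*
 * Summary of the checks above: CHK_SV rejects problem state (PR=1), CHK_HV
 * additionally requires hypervisor state (HV=1), and CHK_HVRM further
 * requires data relocation to be off (hypervisor real mode); user-only
 * builds turn all of them into privileged-instruction exceptions.
 */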
1339 
1340 #define CHK_NONE(CTX)
1341 
1342 /*****************************************************************************/
1343 /* PowerPC instructions table                                                */
1344 
1345 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1346 {                                                                             \
1347     .opc1 = op1,                                                              \
1348     .opc2 = op2,                                                              \
1349     .opc3 = op3,                                                              \
1350     .opc4 = 0xff,                                                             \
1351     .handler = {                                                              \
1352         .inval1  = invl,                                                      \
1353         .type = _typ,                                                         \
1354         .type2 = _typ2,                                                       \
1355         .handler = &gen_##name,                                               \
1356     },                                                                        \
1357     .oname = stringify(name),                                                 \
1358 }
1359 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1360 {                                                                             \
1361     .opc1 = op1,                                                              \
1362     .opc2 = op2,                                                              \
1363     .opc3 = op3,                                                              \
1364     .opc4 = 0xff,                                                             \
1365     .handler = {                                                              \
1366         .inval1  = invl1,                                                     \
1367         .inval2  = invl2,                                                     \
1368         .type = _typ,                                                         \
1369         .type2 = _typ2,                                                       \
1370         .handler = &gen_##name,                                               \
1371     },                                                                        \
1372     .oname = stringify(name),                                                 \
1373 }
1374 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1375 {                                                                             \
1376     .opc1 = op1,                                                              \
1377     .opc2 = op2,                                                              \
1378     .opc3 = op3,                                                              \
1379     .opc4 = 0xff,                                                             \
1380     .handler = {                                                              \
1381         .inval1  = invl,                                                      \
1382         .type = _typ,                                                         \
1383         .type2 = _typ2,                                                       \
1384         .handler = &gen_##name,                                               \
1385     },                                                                        \
1386     .oname = onam,                                                            \
1387 }
1388 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1389 {                                                                             \
1390     .opc1 = op1,                                                              \
1391     .opc2 = op2,                                                              \
1392     .opc3 = op3,                                                              \
1393     .opc4 = op4,                                                              \
1394     .handler = {                                                              \
1395         .inval1  = invl,                                                      \
1396         .type = _typ,                                                         \
1397         .type2 = _typ2,                                                       \
1398         .handler = &gen_##name,                                               \
1399     },                                                                        \
1400     .oname = stringify(name),                                                 \
1401 }
1402 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1403 {                                                                             \
1404     .opc1 = op1,                                                              \
1405     .opc2 = op2,                                                              \
1406     .opc3 = op3,                                                              \
1407     .opc4 = op4,                                                              \
1408     .handler = {                                                              \
1409         .inval1  = invl,                                                      \
1410         .type = _typ,                                                         \
1411         .type2 = _typ2,                                                       \
1412         .handler = &gen_##name,                                               \
1413     },                                                                        \
1414     .oname = onam,                                                            \
1415 }
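
/*
 * Illustration (hypothetical operands): GEN_HANDLER(foo, 0x1F, 0x00, 0x00,
 * 0x00000000, PPC_INTEGER) would expand to an opcode_t initializer with
 * .opc1/.opc2/.opc3 filled in, .opc4 = 0xff, .oname = "foo" and the
 * handler field pointing at a gen_foo() routine defined elsewhere.
 */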
1416 
1417 /* Invalid instruction */
1418 static void gen_invalid(DisasContext *ctx)
1419 {
1420     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1421 }
1422 
1423 static opc_handler_t invalid_handler = {
1424     .inval1  = 0xFFFFFFFF,
1425     .inval2  = 0xFFFFFFFF,
1426     .type    = PPC_NONE,
1427     .type2   = PPC_NONE,
1428     .handler = gen_invalid,
1429 };
1430 
1431 /***                           Integer comparison                          ***/
1432 
1433 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1434 {
1435     TCGv t0 = tcg_temp_new();
1436     TCGv t1 = tcg_temp_new();
1437     TCGv_i32 t = tcg_temp_new_i32();
1438 
1439     tcg_gen_movi_tl(t0, CRF_EQ);
1440     tcg_gen_movi_tl(t1, CRF_LT);
1441     tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1442                        t0, arg0, arg1, t1, t0);
1443     tcg_gen_movi_tl(t1, CRF_GT);
1444     tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1445                        t0, arg0, arg1, t1, t0);
1446 
1447     tcg_gen_trunc_tl_i32(t, t0);
1448     tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1449     tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1450 
1451     tcg_temp_free(t0);
1452     tcg_temp_free(t1);
1453     tcg_temp_free_i32(t);
1454 }
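
/*
 * The CR field computed above is one-hot in LT/GT/EQ (exactly one movcond
 * outcome survives) with the SO flag OR'ed in; e.g. a signed compare of
 * 5 against 7 leaves CRF_LT in cpu_crf[crf], plus the low (SO) bit when
 * XER[SO] is currently set.
 */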
1455 
1456 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1457 {
1458     TCGv t0 = tcg_const_tl(arg1);
1459     gen_op_cmp(arg0, t0, s, crf);
1460     tcg_temp_free(t0);
1461 }
1462 
1463 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1464 {
1465     TCGv t0, t1;
1466     t0 = tcg_temp_new();
1467     t1 = tcg_temp_new();
1468     if (s) {
1469         tcg_gen_ext32s_tl(t0, arg0);
1470         tcg_gen_ext32s_tl(t1, arg1);
1471     } else {
1472         tcg_gen_ext32u_tl(t0, arg0);
1473         tcg_gen_ext32u_tl(t1, arg1);
1474     }
1475     gen_op_cmp(t0, t1, s, crf);
1476     tcg_temp_free(t1);
1477     tcg_temp_free(t0);
1478 }
1479 
1480 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1481 {
1482     TCGv t0 = tcg_const_tl(arg1);
1483     gen_op_cmp32(arg0, t0, s, crf);
1484     tcg_temp_free(t0);
1485 }
1486 
1487 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1488 {
1489     if (NARROW_MODE(ctx)) {
1490         gen_op_cmpi32(reg, 0, 1, 0);
1491     } else {
1492         gen_op_cmpi(reg, 0, 1, 0);
1493     }
1494 }
1495 
1496 /* cmprb - range comparison: isupper, isalpha, islower */
1497 static void gen_cmprb(DisasContext *ctx)
1498 {
1499     TCGv_i32 src1 = tcg_temp_new_i32();
1500     TCGv_i32 src2 = tcg_temp_new_i32();
1501     TCGv_i32 src2lo = tcg_temp_new_i32();
1502     TCGv_i32 src2hi = tcg_temp_new_i32();
1503     TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1504 
1505     tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1506     tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1507 
1508     tcg_gen_andi_i32(src1, src1, 0xFF);
1509     tcg_gen_ext8u_i32(src2lo, src2);
1510     tcg_gen_shri_i32(src2, src2, 8);
1511     tcg_gen_ext8u_i32(src2hi, src2);
1512 
1513     tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1514     tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1515     tcg_gen_and_i32(crf, src2lo, src2hi);
1516 
1517     if (ctx->opcode & 0x00200000) {
1518         tcg_gen_shri_i32(src2, src2, 8);
1519         tcg_gen_ext8u_i32(src2lo, src2);
1520         tcg_gen_shri_i32(src2, src2, 8);
1521         tcg_gen_ext8u_i32(src2hi, src2);
1522         tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1523         tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1524         tcg_gen_and_i32(src2lo, src2lo, src2hi);
1525         tcg_gen_or_i32(crf, crf, src2lo);
1526     }
1527     tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1528     tcg_temp_free_i32(src1);
1529     tcg_temp_free_i32(src2);
1530     tcg_temp_free_i32(src2lo);
1531     tcg_temp_free_i32(src2hi);
1532 }
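
/*
 * Worked example: with rB = 0x00007A61 the single-range form above checks
 * 0x61 <= (rA & 0xFF) <= 0x7A (an ASCII lower-case letter) and deposits
 * the boolean result into the GT bit of the target CR field; the
 * 0x00200000 opcode bit enables a second range taken from the next two
 * bytes of rB.
 */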
1533 
1534 #if defined(TARGET_PPC64)
1535 /* cmpeqb */
1536 static void gen_cmpeqb(DisasContext *ctx)
1537 {
1538     gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1539                       cpu_gpr[rB(ctx->opcode)]);
1540 }
1541 #endif
1542 
1543 /* isel (PowerPC 2.03 specification) */
1544 static void gen_isel(DisasContext *ctx)
1545 {
1546     uint32_t bi = rC(ctx->opcode);
1547     uint32_t mask = 0x08 >> (bi & 0x03);
1548     TCGv t0 = tcg_temp_new();
1549     TCGv zr;
1550 
1551     tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1552     tcg_gen_andi_tl(t0, t0, mask);
1553 
1554     zr = tcg_const_tl(0);
1555     tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1556                        rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1557                        cpu_gpr[rB(ctx->opcode)]);
1558     tcg_temp_free(zr);
1559     tcg_temp_free(t0);
1560 }
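
/*
 * i.e. rD = (CR bit BC set) ? (rA ? GPR[rA] : 0) : GPR[rB], where the mask
 * picks the requested bit out of the 4-bit CR field loaded into t0.
 */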
1561 
1562 /* cmpb: PowerPC 2.05 specification */
1563 static void gen_cmpb(DisasContext *ctx)
1564 {
1565     gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1566                     cpu_gpr[rB(ctx->opcode)]);
1567 }
1568 
1569 /***                           Integer arithmetic                          ***/
1570 
1571 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1572                                            TCGv arg1, TCGv arg2, int sub)
1573 {
1574     TCGv t0 = tcg_temp_new();
1575 
1576     tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1577     tcg_gen_xor_tl(t0, arg1, arg2);
1578     if (sub) {
1579         tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1580     } else {
1581         tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1582     }
1583     tcg_temp_free(t0);
1584     if (NARROW_MODE(ctx)) {
1585         tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1586         if (is_isa300(ctx)) {
1587             tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1588         }
1589     } else {
1590         if (is_isa300(ctx)) {
1591             tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1592         }
1593         tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1594     }
1595     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1596 }
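
/*
 * The overflow computation above uses only the sign bits: for an addition
 * (sub == 0), overflow occurred iff arg1 and arg2 have the same sign and
 * the result arg0 differs from it, which is exactly when
 * (arg0 ^ arg2) & ~(arg1 ^ arg2) has its sign bit set; the subtraction
 * case flips the operand-sign condition. NARROW_MODE looks at bit 31
 * instead of the MSB.
 */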
1597 
1598 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1599                                              TCGv res, TCGv arg0, TCGv arg1,
1600                                              TCGv ca32, int sub)
1601 {
1602     TCGv t0;
1603 
1604     if (!is_isa300(ctx)) {
1605         return;
1606     }
1607 
1608     t0 = tcg_temp_new();
1609     if (sub) {
1610         tcg_gen_eqv_tl(t0, arg0, arg1);
1611     } else {
1612         tcg_gen_xor_tl(t0, arg0, arg1);
1613     }
1614     tcg_gen_xor_tl(t0, t0, res);
1615     tcg_gen_extract_tl(ca32, t0, 32, 1);
1616     tcg_temp_free(t0);
1617 }
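
/*
 * CA32 derivation: for an addition, bit n of the result is
 * arg0[n] ^ arg1[n] ^ carry_in[n], so the carry into bit 32 is just bit 32
 * of (arg0 ^ arg1 ^ res). The subtract case uses eqv (~(arg0 ^ arg1))
 * because subtraction is performed as an addition with one operand
 * complemented.
 */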
1618 
1619 /* Common add function */
1620 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1621                                     TCGv arg2, TCGv ca, TCGv ca32,
1622                                     bool add_ca, bool compute_ca,
1623                                     bool compute_ov, bool compute_rc0)
1624 {
1625     TCGv t0 = ret;
1626 
1627     if (compute_ca || compute_ov) {
1628         t0 = tcg_temp_new();
1629     }
1630 
1631     if (compute_ca) {
1632         if (NARROW_MODE(ctx)) {
1633             /*
1634              * Caution: a non-obvious corner case of the spec is that
1635              * we must produce the *entire* 64-bit addition, yet
1636              * report the carry into bit 32.
1637              */
1638             TCGv t1 = tcg_temp_new();
1639             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1640             tcg_gen_add_tl(t0, arg1, arg2);
1641             if (add_ca) {
1642                 tcg_gen_add_tl(t0, t0, ca);
1643             }
1644             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1645             tcg_temp_free(t1);
1646             tcg_gen_extract_tl(ca, ca, 32, 1);
1647             if (is_isa300(ctx)) {
1648                 tcg_gen_mov_tl(ca32, ca);
1649             }
1650         } else {
1651             TCGv zero = tcg_const_tl(0);
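                 /*
                  * A double-word add with zero high parts leaves the
                  * carry out of the low part in the high half of the
                  * result, which is exactly what CA needs.
                  */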
1652             if (add_ca) {
1653                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1654                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1655             } else {
1656                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1657             }
1658             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1659             tcg_temp_free(zero);
1660         }
1661     } else {
1662         tcg_gen_add_tl(t0, arg1, arg2);
1663         if (add_ca) {
1664             tcg_gen_add_tl(t0, t0, ca);
1665         }
1666     }
1667 
1668     if (compute_ov) {
1669         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1670     }
1671     if (unlikely(compute_rc0)) {
1672         gen_set_Rc0(ctx, t0);
1673     }
1674 
1675     if (t0 != ret) {
1676         tcg_gen_mov_tl(ret, t0);
1677         tcg_temp_free(t0);
1678     }
1679 }
1680 /* Add functions with two operands */
1681 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
1682 static void glue(gen_, name)(DisasContext *ctx)                               \
1683 {                                                                             \
1684     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1685                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1686                      ca, glue(ca, 32),                                        \
1687                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1688 }
1689 /* Add functions with one operand and one immediate */
1690 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
1691                                 add_ca, compute_ca, compute_ov)               \
1692 static void glue(gen_, name)(DisasContext *ctx)                               \
1693 {                                                                             \
1694     TCGv t0 = tcg_const_tl(const_val);                                        \
1695     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1696                      cpu_gpr[rA(ctx->opcode)], t0,                            \
1697                      ca, glue(ca, 32),                                        \
1698                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1699     tcg_temp_free(t0);                                                        \
1700 }
1701 
1702 /* add  add.  addo  addo. */
1703 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1704 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1705 /* addc  addc.  addco  addco. */
1706 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1707 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1708 /* adde  adde.  addeo  addeo. */
1709 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1710 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1711 /* addme  addme.  addmeo  addmeo.  */
1712 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1713 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1714 /* addex */
1715 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1716 /* addze  addze.  addzeo  addzeo.*/
1717 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1718 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1719 /* addic  addic.*/
1720 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1721 {
1722     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1723     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1724                      c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1725     tcg_temp_free(c);
1726 }
1727 
1728 static void gen_addic(DisasContext *ctx)
1729 {
1730     gen_op_addic(ctx, 0);
1731 }
1732 
1733 static void gen_addic_(DisasContext *ctx)
1734 {
1735     gen_op_addic(ctx, 1);
1736 }
1737 
1738 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1739                                      TCGv arg2, int sign, int compute_ov)
1740 {
1741     TCGv_i32 t0 = tcg_temp_new_i32();
1742     TCGv_i32 t1 = tcg_temp_new_i32();
1743     TCGv_i32 t2 = tcg_temp_new_i32();
1744     TCGv_i32 t3 = tcg_temp_new_i32();
1745 
1746     tcg_gen_trunc_tl_i32(t0, arg1);
1747     tcg_gen_trunc_tl_i32(t1, arg2);
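         /*
          * t2 is set when the division would be invalid: a zero
          * divisor, or INT_MIN / -1 in the signed case.  The divisor
          * is then forced to a safe value so that the host division
          * cannot trap; the quotient is undefined in that case, as the
          * ISA allows, and t2 feeds OV/OV32 below when requested.
          */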
1748     if (sign) {
1749         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1750         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1751         tcg_gen_and_i32(t2, t2, t3);
1752         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1753         tcg_gen_or_i32(t2, t2, t3);
1754         tcg_gen_movi_i32(t3, 0);
1755         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1756         tcg_gen_div_i32(t3, t0, t1);
1757         tcg_gen_extu_i32_tl(ret, t3);
1758     } else {
1759         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1760         tcg_gen_movi_i32(t3, 0);
1761         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1762         tcg_gen_divu_i32(t3, t0, t1);
1763         tcg_gen_extu_i32_tl(ret, t3);
1764     }
1765     if (compute_ov) {
1766         tcg_gen_extu_i32_tl(cpu_ov, t2);
1767         if (is_isa300(ctx)) {
1768             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1769         }
1770         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1771     }
1772     tcg_temp_free_i32(t0);
1773     tcg_temp_free_i32(t1);
1774     tcg_temp_free_i32(t2);
1775     tcg_temp_free_i32(t3);
1776 
1777     if (unlikely(Rc(ctx->opcode) != 0)) {
1778         gen_set_Rc0(ctx, ret);
1779     }
1780 }
1781 /* Div functions */
1782 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
1783 static void glue(gen_, name)(DisasContext *ctx)                               \
1784 {                                                                             \
1785     gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1786                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1787                      sign, compute_ov);                                       \
1788 }
1789 /* divwu  divwu.  divwuo  divwuo.   */
1790 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1791 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1792 /* divw  divw.  divwo  divwo.   */
1793 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1794 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1795 
1796 /* div[wd]eu[o][.] */
1797 #define GEN_DIVE(name, hlpr, compute_ov)                                      \
1798 static void gen_##name(DisasContext *ctx)                                     \
1799 {                                                                             \
1800     TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
1801     gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
1802                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1803     tcg_temp_free_i32(t0);                                                    \
1804     if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
1805         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
1806     }                                                                         \
1807 }
1808 
1809 GEN_DIVE(divweu, divweu, 0);
1810 GEN_DIVE(divweuo, divweu, 1);
1811 GEN_DIVE(divwe, divwe, 0);
1812 GEN_DIVE(divweo, divwe, 1);
1813 
1814 #if defined(TARGET_PPC64)
1815 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1816                                      TCGv arg2, int sign, int compute_ov)
1817 {
1818     TCGv_i64 t0 = tcg_temp_new_i64();
1819     TCGv_i64 t1 = tcg_temp_new_i64();
1820     TCGv_i64 t2 = tcg_temp_new_i64();
1821     TCGv_i64 t3 = tcg_temp_new_i64();
1822 
1823     tcg_gen_mov_i64(t0, arg1);
1824     tcg_gen_mov_i64(t1, arg2);
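         /*
          * As in gen_op_arith_divw: t2 flags an invalid division (zero
          * divisor, or INT64_MIN / -1 when signed) and the divisor is
          * forced to a safe value so the host division cannot trap.
          */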
1825     if (sign) {
1826         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1827         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1828         tcg_gen_and_i64(t2, t2, t3);
1829         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1830         tcg_gen_or_i64(t2, t2, t3);
1831         tcg_gen_movi_i64(t3, 0);
1832         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1833         tcg_gen_div_i64(ret, t0, t1);
1834     } else {
1835         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1836         tcg_gen_movi_i64(t3, 0);
1837         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1838         tcg_gen_divu_i64(ret, t0, t1);
1839     }
1840     if (compute_ov) {
1841         tcg_gen_mov_tl(cpu_ov, t2);
1842         if (is_isa300(ctx)) {
1843             tcg_gen_mov_tl(cpu_ov32, t2);
1844         }
1845         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1846     }
1847     tcg_temp_free_i64(t0);
1848     tcg_temp_free_i64(t1);
1849     tcg_temp_free_i64(t2);
1850     tcg_temp_free_i64(t3);
1851 
1852     if (unlikely(Rc(ctx->opcode) != 0)) {
1853         gen_set_Rc0(ctx, ret);
1854     }
1855 }
1856 
1857 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
1858 static void glue(gen_, name)(DisasContext *ctx)                               \
1859 {                                                                             \
1860     gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1861                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
1862                       sign, compute_ov);                                      \
1863 }
1864 /* divdu  divdu.  divduo  divduo.   */
1865 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1866 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1867 /* divd  divd.  divdo  divdo.   */
1868 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1869 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1870 
1871 GEN_DIVE(divdeu, divdeu, 0);
1872 GEN_DIVE(divdeuo, divdeu, 1);
1873 GEN_DIVE(divde, divde, 0);
1874 GEN_DIVE(divdeo, divde, 1);
1875 #endif
1876 
1877 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1878                                      TCGv arg2, int sign)
1879 {
1880     TCGv_i32 t0 = tcg_temp_new_i32();
1881     TCGv_i32 t1 = tcg_temp_new_i32();
1882 
1883     tcg_gen_trunc_tl_i32(t0, arg1);
1884     tcg_gen_trunc_tl_i32(t1, arg2);
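         /*
          * As with the divisions above, a divisor that would make the
          * host rem/remu trap (zero, or -1 together with INT_MIN in
          * the signed case) is replaced by 1 first; the result is then
          * undefined, as the ISA permits.
          */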
1885     if (sign) {
1886         TCGv_i32 t2 = tcg_temp_new_i32();
1887         TCGv_i32 t3 = tcg_temp_new_i32();
1888         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1889         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1890         tcg_gen_and_i32(t2, t2, t3);
1891         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1892         tcg_gen_or_i32(t2, t2, t3);
1893         tcg_gen_movi_i32(t3, 0);
1894         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1895         tcg_gen_rem_i32(t3, t0, t1);
1896         tcg_gen_ext_i32_tl(ret, t3);
1897         tcg_temp_free_i32(t2);
1898         tcg_temp_free_i32(t3);
1899     } else {
1900         TCGv_i32 t2 = tcg_const_i32(1);
1901         TCGv_i32 t3 = tcg_const_i32(0);
1902         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1903         tcg_gen_remu_i32(t3, t0, t1);
1904         tcg_gen_extu_i32_tl(ret, t3);
1905         tcg_temp_free_i32(t2);
1906         tcg_temp_free_i32(t3);
1907     }
1908     tcg_temp_free_i32(t0);
1909     tcg_temp_free_i32(t1);
1910 }
1911 
1912 #define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
1913 static void glue(gen_, name)(DisasContext *ctx)                             \
1914 {                                                                           \
1915     gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1916                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1917                       sign);                                                \
1918 }
1919 
1920 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1921 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1922 
1923 #if defined(TARGET_PPC64)
1924 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1925                                      TCGv arg2, int sign)
1926 {
1927     TCGv_i64 t0 = tcg_temp_new_i64();
1928     TCGv_i64 t1 = tcg_temp_new_i64();
1929 
1930     tcg_gen_mov_i64(t0, arg1);
1931     tcg_gen_mov_i64(t1, arg2);
1932     if (sign) {
1933         TCGv_i64 t2 = tcg_temp_new_i64();
1934         TCGv_i64 t3 = tcg_temp_new_i64();
1935         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1936         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1937         tcg_gen_and_i64(t2, t2, t3);
1938         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1939         tcg_gen_or_i64(t2, t2, t3);
1940         tcg_gen_movi_i64(t3, 0);
1941         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1942         tcg_gen_rem_i64(ret, t0, t1);
1943         tcg_temp_free_i64(t2);
1944         tcg_temp_free_i64(t3);
1945     } else {
1946         TCGv_i64 t2 = tcg_const_i64(1);
1947         TCGv_i64 t3 = tcg_const_i64(0);
1948         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1949         tcg_gen_remu_i64(ret, t0, t1);
1950         tcg_temp_free_i64(t2);
1951         tcg_temp_free_i64(t3);
1952     }
1953     tcg_temp_free_i64(t0);
1954     tcg_temp_free_i64(t1);
1955 }
1956 
1957 #define GEN_INT_ARITH_MODD(name, opc3, sign)                                \
1958 static void glue(gen_, name)(DisasContext *ctx)                             \
1959 {                                                                           \
1960     gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1961                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1962                       sign);                                                \
1963 }
1964 
1965 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1966 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1967 #endif
1968 
1969 /* mulhw  mulhw. */
1970 static void gen_mulhw(DisasContext *ctx)
1971 {
1972     TCGv_i32 t0 = tcg_temp_new_i32();
1973     TCGv_i32 t1 = tcg_temp_new_i32();
1974 
1975     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1976     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1977     tcg_gen_muls2_i32(t0, t1, t0, t1);
1978     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1979     tcg_temp_free_i32(t0);
1980     tcg_temp_free_i32(t1);
1981     if (unlikely(Rc(ctx->opcode) != 0)) {
1982         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1983     }
1984 }
1985 
1986 /* mulhwu  mulhwu.  */
1987 static void gen_mulhwu(DisasContext *ctx)
1988 {
1989     TCGv_i32 t0 = tcg_temp_new_i32();
1990     TCGv_i32 t1 = tcg_temp_new_i32();
1991 
1992     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1993     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1994     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1995     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1996     tcg_temp_free_i32(t0);
1997     tcg_temp_free_i32(t1);
1998     if (unlikely(Rc(ctx->opcode) != 0)) {
1999         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2000     }
2001 }
2002 
2003 /* mullw  mullw. */
2004 static void gen_mullw(DisasContext *ctx)
2005 {
2006 #if defined(TARGET_PPC64)
2007     TCGv_i64 t0, t1;
2008     t0 = tcg_temp_new_i64();
2009     t1 = tcg_temp_new_i64();
2010     tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2011     tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2012     tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2013     tcg_temp_free(t0);
2014     tcg_temp_free(t1);
2015 #else
2016     tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2017                     cpu_gpr[rB(ctx->opcode)]);
2018 #endif
2019     if (unlikely(Rc(ctx->opcode) != 0)) {
2020         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2021     }
2022 }
2023 
2024 /* mullwo  mullwo. */
2025 static void gen_mullwo(DisasContext *ctx)
2026 {
2027     TCGv_i32 t0 = tcg_temp_new_i32();
2028     TCGv_i32 t1 = tcg_temp_new_i32();
2029 
2030     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2031     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2032     tcg_gen_muls2_i32(t0, t1, t0, t1);
2033 #if defined(TARGET_PPC64)
2034     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2035 #else
2036     tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2037 #endif
2038 
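         /*
          * OV is set when the signed 64-bit product does not fit in 32
          * bits, i.e. when the high half is not the sign extension of
          * the low half.
          */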
2039     tcg_gen_sari_i32(t0, t0, 31);
2040     tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2041     tcg_gen_extu_i32_tl(cpu_ov, t0);
2042     if (is_isa300(ctx)) {
2043         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2044     }
2045     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2046 
2047     tcg_temp_free_i32(t0);
2048     tcg_temp_free_i32(t1);
2049     if (unlikely(Rc(ctx->opcode) != 0)) {
2050         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2051     }
2052 }
2053 
2054 /* mulli */
2055 static void gen_mulli(DisasContext *ctx)
2056 {
2057     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2058                     SIMM(ctx->opcode));
2059 }
2060 
2061 #if defined(TARGET_PPC64)
2062 /* mulhd  mulhd. */
2063 static void gen_mulhd(DisasContext *ctx)
2064 {
2065     TCGv lo = tcg_temp_new();
2066     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2067                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2068     tcg_temp_free(lo);
2069     if (unlikely(Rc(ctx->opcode) != 0)) {
2070         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2071     }
2072 }
2073 
2074 /* mulhdu  mulhdu. */
2075 static void gen_mulhdu(DisasContext *ctx)
2076 {
2077     TCGv lo = tcg_temp_new();
2078     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2079                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2080     tcg_temp_free(lo);
2081     if (unlikely(Rc(ctx->opcode) != 0)) {
2082         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2083     }
2084 }
2085 
2086 /* mulld  mulld. */
2087 static void gen_mulld(DisasContext *ctx)
2088 {
2089     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2090                    cpu_gpr[rB(ctx->opcode)]);
2091     if (unlikely(Rc(ctx->opcode) != 0)) {
2092         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2093     }
2094 }
2095 
2096 /* mulldo  mulldo. */
2097 static void gen_mulldo(DisasContext *ctx)
2098 {
2099     TCGv_i64 t0 = tcg_temp_new_i64();
2100     TCGv_i64 t1 = tcg_temp_new_i64();
2101 
2102     tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2103                       cpu_gpr[rB(ctx->opcode)]);
2104     tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2105 
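         /*
          * OV is set when the signed 128-bit product does not fit in
          * 64 bits, i.e. when the high half is not the sign extension
          * of the low half.
          */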
2106     tcg_gen_sari_i64(t0, t0, 63);
2107     tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2108     if (is_isa300(ctx)) {
2109         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2110     }
2111     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2112 
2113     tcg_temp_free_i64(t0);
2114     tcg_temp_free_i64(t1);
2115 
2116     if (unlikely(Rc(ctx->opcode) != 0)) {
2117         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2118     }
2119 }
2120 #endif
2121 
2122 /* Common subf function */
2123 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2124                                      TCGv arg2, bool add_ca, bool compute_ca,
2125                                      bool compute_ov, bool compute_rc0)
2126 {
2127     TCGv t0 = ret;
2128 
2129     if (compute_ca || compute_ov) {
2130         t0 = tcg_temp_new();
2131     }
2132 
2133     if (compute_ca) {
2134         /* dest = ~arg1 + arg2 [+ ca].  */
2135         if (NARROW_MODE(ctx)) {
2136             /*
2137              * Caution: a non-obvious corner case of the spec is that
2138              * we must produce the *entire* 64-bit addition, but
2139              * produce the carry into bit 32.
2140              */
2141             TCGv inv1 = tcg_temp_new();
2142             TCGv t1 = tcg_temp_new();
2143             tcg_gen_not_tl(inv1, arg1);
2144             if (add_ca) {
2145                 tcg_gen_add_tl(t0, arg2, cpu_ca);
2146             } else {
2147                 tcg_gen_addi_tl(t0, arg2, 1);
2148             }
2149             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
2150             tcg_gen_add_tl(t0, t0, inv1);
2151             tcg_temp_free(inv1);
2152             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
2153             tcg_temp_free(t1);
2154             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2155             if (is_isa300(ctx)) {
2156                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2157             }
2158         } else if (add_ca) {
2159             TCGv zero, inv1 = tcg_temp_new();
2160             tcg_gen_not_tl(inv1, arg1);
2161             zero = tcg_const_tl(0);
2162             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2163             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2164             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2165             tcg_temp_free(zero);
2166             tcg_temp_free(inv1);
2167         } else {
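                 /*
                  * subf computes ~arg1 + arg2 + 1, so CA (the carry
                  * out, i.e. the complement of the borrow) is set
                  * exactly when arg2 >= arg1 unsigned.
                  */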
2168             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2169             tcg_gen_sub_tl(t0, arg2, arg1);
2170             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2171         }
2172     } else if (add_ca) {
2173         /*
2174          * Since we're ignoring carry-out, we can simplify the
2175          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2176          */
2177         tcg_gen_sub_tl(t0, arg2, arg1);
2178         tcg_gen_add_tl(t0, t0, cpu_ca);
2179         tcg_gen_subi_tl(t0, t0, 1);
2180     } else {
2181         tcg_gen_sub_tl(t0, arg2, arg1);
2182     }
2183 
2184     if (compute_ov) {
2185         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2186     }
2187     if (unlikely(compute_rc0)) {
2188         gen_set_Rc0(ctx, t0);
2189     }
2190 
2191     if (t0 != ret) {
2192         tcg_gen_mov_tl(ret, t0);
2193         tcg_temp_free(t0);
2194     }
2195 }
2196 /* Sub functions with two operands */
2197 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
2198 static void glue(gen_, name)(DisasContext *ctx)                               \
2199 {                                                                             \
2200     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2201                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
2202                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2203 }
2204 /* Sub functions with one operand and one immediate */
2205 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
2206                                 add_ca, compute_ca, compute_ov)               \
2207 static void glue(gen_, name)(DisasContext *ctx)                               \
2208 {                                                                             \
2209     TCGv t0 = tcg_const_tl(const_val);                                        \
2210     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2211                       cpu_gpr[rA(ctx->opcode)], t0,                           \
2212                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2213     tcg_temp_free(t0);                                                        \
2214 }
2215 /* subf  subf.  subfo  subfo. */
2216 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2217 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2218 /* subfc  subfc.  subfco  subfco. */
2219 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2220 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2221 /* subfe  subfe.  subfeo  subfeo. */
2222 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2223 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2224 /* subfme  subfme.  subfmeo  subfmeo.  */
2225 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2226 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2227 /* subfze  subfze.  subfzeo  subfzeo.*/
2228 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2229 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2230 
2231 /* subfic */
2232 static void gen_subfic(DisasContext *ctx)
2233 {
2234     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2235     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2236                       c, 0, 1, 0, 0);
2237     tcg_temp_free(c);
2238 }
2239 
2240 /* neg neg. nego nego. */
2241 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2242 {
2243     TCGv zero = tcg_const_tl(0);
2244     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2245                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2246     tcg_temp_free(zero);
2247 }
2248 
2249 static void gen_neg(DisasContext *ctx)
2250 {
2251     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2252     if (unlikely(Rc(ctx->opcode))) {
2253         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2254     }
2255 }
2256 
2257 static void gen_nego(DisasContext *ctx)
2258 {
2259     gen_op_arith_neg(ctx, 1);
2260 }
2261 
2262 /***                            Integer logical                            ***/
2263 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2264 static void glue(gen_, name)(DisasContext *ctx)                               \
2265 {                                                                             \
2266     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2267        cpu_gpr[rB(ctx->opcode)]);                                             \
2268     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2269         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2270 }
2271 
2272 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2273 static void glue(gen_, name)(DisasContext *ctx)                               \
2274 {                                                                             \
2275     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2276     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2277         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2278 }
2279 
2280 /* and & and. */
2281 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2282 /* andc & andc. */
2283 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2284 
2285 /* andi. */
2286 static void gen_andi_(DisasContext *ctx)
2287 {
2288     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2289                     UIMM(ctx->opcode));
2290     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2291 }
2292 
2293 /* andis. */
2294 static void gen_andis_(DisasContext *ctx)
2295 {
2296     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2297                     UIMM(ctx->opcode) << 16);
2298     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2299 }
2300 
2301 /* cntlzw */
2302 static void gen_cntlzw(DisasContext *ctx)
2303 {
2304     TCGv_i32 t = tcg_temp_new_i32();
2305 
2306     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2307     tcg_gen_clzi_i32(t, t, 32);
2308     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2309     tcg_temp_free_i32(t);
2310 
2311     if (unlikely(Rc(ctx->opcode) != 0)) {
2312         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2313     }
2314 }
2315 
2316 /* cnttzw */
2317 static void gen_cnttzw(DisasContext *ctx)
2318 {
2319     TCGv_i32 t = tcg_temp_new_i32();
2320 
2321     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2322     tcg_gen_ctzi_i32(t, t, 32);
2323     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2324     tcg_temp_free_i32(t);
2325 
2326     if (unlikely(Rc(ctx->opcode) != 0)) {
2327         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2328     }
2329 }
2330 
2331 /* eqv & eqv. */
2332 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2333 /* extsb & extsb. */
2334 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2335 /* extsh & extsh. */
2336 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2337 /* nand & nand. */
2338 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2339 /* nor & nor. */
2340 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2341 
2342 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2343 static void gen_pause(DisasContext *ctx)
2344 {
2345     TCGv_i32 t0 = tcg_const_i32(0);
2346     tcg_gen_st_i32(t0, cpu_env,
2347                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2348     tcg_temp_free_i32(t0);
2349 
2350     /* Stop translation; this gives other CPUs a chance to run */
2351     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2352 }
2353 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2354 
2355 /* or & or. */
2356 static void gen_or(DisasContext *ctx)
2357 {
2358     int rs, ra, rb;
2359 
2360     rs = rS(ctx->opcode);
2361     ra = rA(ctx->opcode);
2362     rb = rB(ctx->opcode);
2363     /* Optimisation for the mr (move register) case */
2364     if (rs != ra || rs != rb) {
2365         if (rs != rb) {
2366             tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2367         } else {
2368             tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2369         }
2370         if (unlikely(Rc(ctx->opcode) != 0)) {
2371             gen_set_Rc0(ctx, cpu_gpr[ra]);
2372         }
2373     } else if (unlikely(Rc(ctx->opcode) != 0)) {
2374         gen_set_Rc0(ctx, cpu_gpr[rs]);
2375 #if defined(TARGET_PPC64)
2376     } else if (rs != 0) { /* 0 is nop */
2377         int prio = 0;
2378 
2379         switch (rs) {
2380         case 1:
2381             /* Set process priority to low */
2382             prio = 2;
2383             break;
2384         case 6:
2385             /* Set process priority to medium-low */
2386             prio = 3;
2387             break;
2388         case 2:
2389             /* Set process priority to normal */
2390             prio = 4;
2391             break;
2392 #if !defined(CONFIG_USER_ONLY)
2393         case 31:
2394             if (!ctx->pr) {
2395                 /* Set process priority to very low */
2396                 prio = 1;
2397             }
2398             break;
2399         case 5:
2400             if (!ctx->pr) {
2401                 /* Set process priority to medium-hight */
2402                 /* Set process priority to medium-high */
2403             }
2404             break;
2405         case 3:
2406             if (!ctx->pr) {
2407                 /* Set process priority to high */
2408                 prio = 6;
2409             }
2410             break;
2411         case 7:
2412             if (ctx->hv && !ctx->pr) {
2413                 /* Set process priority to very high */
2414                 prio = 7;
2415             }
2416             break;
2417 #endif
2418         default:
2419             break;
2420         }
2421         if (prio) {
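                 /*
                  * The priority is kept in PPR bits 11:13 (IBM
                  * numbering), i.e. bits 52:50 of the register, hence
                  * the mask and the shift by 50.
                  */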
2422             TCGv t0 = tcg_temp_new();
2423             gen_load_spr(t0, SPR_PPR);
2424             tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2425             tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2426             gen_store_spr(SPR_PPR, t0);
2427             tcg_temp_free(t0);
2428         }
2429 #if !defined(CONFIG_USER_ONLY)
2430         /*
2431          * Pause out of TCG; otherwise spin loops with smt_low eat too
2432          * much CPU and the kernel hangs.  This applies to all
2433          * encodings other than no-op, e.g., miso(rs=26), yield(27),
2434          * mdoio(29), mdoom(30), and all currently undefined.
2435          */
2436         gen_pause(ctx);
2437 #endif
2438 #endif
2439     }
2440 }
2441 /* orc & orc. */
2442 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2443 
2444 /* xor & xor. */
2445 static void gen_xor(DisasContext *ctx)
2446 {
2447     /* Optimisation for "set to zero" case */
2448     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2449         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2450                        cpu_gpr[rB(ctx->opcode)]);
2451     } else {
2452         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2453     }
2454     if (unlikely(Rc(ctx->opcode) != 0)) {
2455         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2456     }
2457 }
2458 
2459 /* ori */
2460 static void gen_ori(DisasContext *ctx)
2461 {
2462     target_ulong uimm = UIMM(ctx->opcode);
2463 
2464     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2465         return;
2466     }
2467     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2468 }
2469 
2470 /* oris */
2471 static void gen_oris(DisasContext *ctx)
2472 {
2473     target_ulong uimm = UIMM(ctx->opcode);
2474 
2475     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2476         /* NOP */
2477         return;
2478     }
2479     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2480                    uimm << 16);
2481 }
2482 
2483 /* xori */
2484 static void gen_xori(DisasContext *ctx)
2485 {
2486     target_ulong uimm = UIMM(ctx->opcode);
2487 
2488     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2489         /* NOP */
2490         return;
2491     }
2492     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2493 }
2494 
2495 /* xoris */
2496 static void gen_xoris(DisasContext *ctx)
2497 {
2498     target_ulong uimm = UIMM(ctx->opcode);
2499 
2500     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2501         /* NOP */
2502         return;
2503     }
2504     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2505                     uimm << 16);
2506 }
2507 
2508 /* popcntb : PowerPC 2.03 specification */
2509 static void gen_popcntb(DisasContext *ctx)
2510 {
2511     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2512 }
2513 
2514 static void gen_popcntw(DisasContext *ctx)
2515 {
2516 #if defined(TARGET_PPC64)
2517     gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2518 #else
2519     tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2520 #endif
2521 }
2522 
2523 #if defined(TARGET_PPC64)
2524 /* popcntd: PowerPC 2.06 specification */
2525 static void gen_popcntd(DisasContext *ctx)
2526 {
2527     tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2528 }
2529 #endif
2530 
2531 /* prtyw: PowerPC 2.05 specification */
2532 static void gen_prtyw(DisasContext *ctx)
2533 {
2534     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2535     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2536     TCGv t0 = tcg_temp_new();
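         /*
          * XOR-fold the low bit of every byte of each word into bit 0
          * of that word, then mask off everything else.
          */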
2537     tcg_gen_shri_tl(t0, rs, 16);
2538     tcg_gen_xor_tl(ra, rs, t0);
2539     tcg_gen_shri_tl(t0, ra, 8);
2540     tcg_gen_xor_tl(ra, ra, t0);
2541     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2542     tcg_temp_free(t0);
2543 }
2544 
2545 #if defined(TARGET_PPC64)
2546 /* prtyd: PowerPC 2.05 specification */
2547 static void gen_prtyd(DisasContext *ctx)
2548 {
2549     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2550     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2551     TCGv t0 = tcg_temp_new();
2552     tcg_gen_shri_tl(t0, rs, 32);
2553     tcg_gen_xor_tl(ra, rs, t0);
2554     tcg_gen_shri_tl(t0, ra, 16);
2555     tcg_gen_xor_tl(ra, ra, t0);
2556     tcg_gen_shri_tl(t0, ra, 8);
2557     tcg_gen_xor_tl(ra, ra, t0);
2558     tcg_gen_andi_tl(ra, ra, 1);
2559     tcg_temp_free(t0);
2560 }
2561 #endif
2562 
2563 #if defined(TARGET_PPC64)
2564 /* bpermd */
2565 static void gen_bpermd(DisasContext *ctx)
2566 {
2567     gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2568                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2569 }
2570 #endif
2571 
2572 #if defined(TARGET_PPC64)
2573 /* extsw & extsw. */
2574 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2575 
2576 /* cntlzd */
2577 static void gen_cntlzd(DisasContext *ctx)
2578 {
2579     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2580     if (unlikely(Rc(ctx->opcode) != 0)) {
2581         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2582     }
2583 }
2584 
2585 /* cnttzd */
2586 static void gen_cnttzd(DisasContext *ctx)
2587 {
2588     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2589     if (unlikely(Rc(ctx->opcode) != 0)) {
2590         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2591     }
2592 }
2593 
2594 /* darn */
2595 static void gen_darn(DisasContext *ctx)
2596 {
2597     int l = L(ctx->opcode);
2598 
2599     if (l > 2) {
2600         tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2601     } else {
2602         gen_icount_io_start(ctx);
2603         if (l == 0) {
2604             gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2605         } else {
2606             /* Return 64-bit random for both CRN and RRN */
2607             gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2608         }
2609     }
2610 }
2611 #endif
2612 
2613 /***                             Integer rotate                            ***/
2614 
2615 /* rlwimi & rlwimi. */
2616 static void gen_rlwimi(DisasContext *ctx)
2617 {
2618     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2619     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2620     uint32_t sh = SH(ctx->opcode);
2621     uint32_t mb = MB(ctx->opcode);
2622     uint32_t me = ME(ctx->opcode);
2623 
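         /*
          * sh == 31 - me with mb <= me means the rotation lines the
          * contiguous mask up with bit sh, so rlwimi reduces to
          * depositing the low me - mb + 1 bits of rS at position sh.
          */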
2624     if (sh == (31 - me) && mb <= me) {
2625         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2626     } else {
2627         target_ulong mask;
2628         bool mask_in_32b = true;
2629         TCGv t1;
2630 
2631 #if defined(TARGET_PPC64)
2632         mb += 32;
2633         me += 32;
2634 #endif
2635         mask = MASK(mb, me);
2636 
2637 #if defined(TARGET_PPC64)
2638         if (mask > 0xffffffffu) {
2639             mask_in_32b = false;
2640         }
2641 #endif
2642         t1 = tcg_temp_new();
2643         if (mask_in_32b) {
2644             TCGv_i32 t0 = tcg_temp_new_i32();
2645             tcg_gen_trunc_tl_i32(t0, t_rs);
2646             tcg_gen_rotli_i32(t0, t0, sh);
2647             tcg_gen_extu_i32_tl(t1, t0);
2648             tcg_temp_free_i32(t0);
2649         } else {
2650 #if defined(TARGET_PPC64)
2651             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2652             tcg_gen_rotli_i64(t1, t1, sh);
2653 #else
2654             g_assert_not_reached();
2655 #endif
2656         }
2657 
2658         tcg_gen_andi_tl(t1, t1, mask);
2659         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2660         tcg_gen_or_tl(t_ra, t_ra, t1);
2661         tcg_temp_free(t1);
2662     }
2663     if (unlikely(Rc(ctx->opcode) != 0)) {
2664         gen_set_Rc0(ctx, t_ra);
2665     }
2666 }
2667 
2668 /* rlwinm & rlwinm. */
2669 static void gen_rlwinm(DisasContext *ctx)
2670 {
2671     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2672     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2673     int sh = SH(ctx->opcode);
2674     int mb = MB(ctx->opcode);
2675     int me = ME(ctx->opcode);
2676     int len = me - mb + 1;
2677     int rsh = (32 - sh) & 31;
2678 
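         /*
          * The first special case is a left shift of a zero-extended
          * bit field (deposit into zero); the second is a plain
          * right-shift field extract.  Anything else falls back to
          * rotate-and-mask.
          */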
2679     if (sh != 0 && len > 0 && me == (31 - sh)) {
2680         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2681     } else if (me == 31 && rsh + len <= 32) {
2682         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2683     } else {
2684         target_ulong mask;
2685         bool mask_in_32b = true;
2686 #if defined(TARGET_PPC64)
2687         mb += 32;
2688         me += 32;
2689 #endif
2690         mask = MASK(mb, me);
2691 #if defined(TARGET_PPC64)
2692         if (mask > 0xffffffffu) {
2693             mask_in_32b = false;
2694         }
2695 #endif
2696         if (mask_in_32b) {
2697             if (sh == 0) {
2698                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2699             } else {
2700                 TCGv_i32 t0 = tcg_temp_new_i32();
2701                 tcg_gen_trunc_tl_i32(t0, t_rs);
2702                 tcg_gen_rotli_i32(t0, t0, sh);
2703                 tcg_gen_andi_i32(t0, t0, mask);
2704                 tcg_gen_extu_i32_tl(t_ra, t0);
2705                 tcg_temp_free_i32(t0);
2706             }
2707         } else {
2708 #if defined(TARGET_PPC64)
2709             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2710             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2711             tcg_gen_andi_i64(t_ra, t_ra, mask);
2712 #else
2713             g_assert_not_reached();
2714 #endif
2715         }
2716     }
2717     if (unlikely(Rc(ctx->opcode) != 0)) {
2718         gen_set_Rc0(ctx, t_ra);
2719     }
2720 }
2721 
2722 /* rlwnm & rlwnm. */
2723 static void gen_rlwnm(DisasContext *ctx)
2724 {
2725     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2726     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2727     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2728     uint32_t mb = MB(ctx->opcode);
2729     uint32_t me = ME(ctx->opcode);
2730     target_ulong mask;
2731     bool mask_in_32b = true;
2732 
2733 #if defined(TARGET_PPC64)
2734     mb += 32;
2735     me += 32;
2736 #endif
2737     mask = MASK(mb, me);
2738 
2739 #if defined(TARGET_PPC64)
2740     if (mask > 0xffffffffu) {
2741         mask_in_32b = false;
2742     }
2743 #endif
2744     if (mask_in_32b) {
2745         TCGv_i32 t0 = tcg_temp_new_i32();
2746         TCGv_i32 t1 = tcg_temp_new_i32();
2747         tcg_gen_trunc_tl_i32(t0, t_rb);
2748         tcg_gen_trunc_tl_i32(t1, t_rs);
2749         tcg_gen_andi_i32(t0, t0, 0x1f);
2750         tcg_gen_rotl_i32(t1, t1, t0);
2751         tcg_gen_extu_i32_tl(t_ra, t1);
2752         tcg_temp_free_i32(t0);
2753         tcg_temp_free_i32(t1);
2754     } else {
2755 #if defined(TARGET_PPC64)
2756         TCGv_i64 t0 = tcg_temp_new_i64();
2757         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2758         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2759         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2760         tcg_temp_free_i64(t0);
2761 #else
2762         g_assert_not_reached();
2763 #endif
2764     }
2765 
2766     tcg_gen_andi_tl(t_ra, t_ra, mask);
2767 
2768     if (unlikely(Rc(ctx->opcode) != 0)) {
2769         gen_set_Rc0(ctx, t_ra);
2770     }
2771 }
2772 
2773 #if defined(TARGET_PPC64)
2774 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2775 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2776 {                                                                             \
2777     gen_##name(ctx, 0);                                                       \
2778 }                                                                             \
2779                                                                               \
2780 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2781 {                                                                             \
2782     gen_##name(ctx, 1);                                                       \
2783 }
2784 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2785 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2786 {                                                                             \
2787     gen_##name(ctx, 0, 0);                                                    \
2788 }                                                                             \
2789                                                                               \
2790 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2791 {                                                                             \
2792     gen_##name(ctx, 0, 1);                                                    \
2793 }                                                                             \
2794                                                                               \
2795 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2796 {                                                                             \
2797     gen_##name(ctx, 1, 0);                                                    \
2798 }                                                                             \
2799                                                                               \
2800 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2801 {                                                                             \
2802     gen_##name(ctx, 1, 1);                                                    \
2803 }
2804 
2805 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2806 {
2807     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2808     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2809     int len = me - mb + 1;
2810     int rsh = (64 - sh) & 63;
2811 
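         /* Same special cases as in gen_rlwinm, at 64-bit width. */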
2812     if (sh != 0 && len > 0 && me == (63 - sh)) {
2813         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2814     } else if (me == 63 && rsh + len <= 64) {
2815         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2816     } else {
2817         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2818         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2819     }
2820     if (unlikely(Rc(ctx->opcode) != 0)) {
2821         gen_set_Rc0(ctx, t_ra);
2822     }
2823 }
2824 
2825 /* rldicl - rldicl. */
2826 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2827 {
2828     uint32_t sh, mb;
2829 
2830     sh = SH(ctx->opcode) | (shn << 5);
2831     mb = MB(ctx->opcode) | (mbn << 5);
2832     gen_rldinm(ctx, mb, 63, sh);
2833 }
2834 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2835 
2836 /* rldicr - rldicr. */
2837 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2838 {
2839     uint32_t sh, me;
2840 
2841     sh = SH(ctx->opcode) | (shn << 5);
2842     me = MB(ctx->opcode) | (men << 5);
2843     gen_rldinm(ctx, 0, me, sh);
2844 }
2845 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2846 
2847 /* rldic - rldic. */
2848 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2849 {
2850     uint32_t sh, mb;
2851 
2852     sh = SH(ctx->opcode) | (shn << 5);
2853     mb = MB(ctx->opcode) | (mbn << 5);
2854     gen_rldinm(ctx, mb, 63 - sh, sh);
2855 }
2856 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2857 
2858 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2859 {
2860     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2861     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2862     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2863     TCGv t0;
2864 
2865     t0 = tcg_temp_new();
2866     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2867     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2868     tcg_temp_free(t0);
2869 
2870     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2871     if (unlikely(Rc(ctx->opcode) != 0)) {
2872         gen_set_Rc0(ctx, t_ra);
2873     }
2874 }
2875 
2876 /* rldcl - rldcl. */
2877 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2878 {
2879     uint32_t mb;
2880 
2881     mb = MB(ctx->opcode) | (mbn << 5);
2882     gen_rldnm(ctx, mb, 63);
2883 }
2884 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2885 
2886 /* rldcr - rldcr. */
2887 static inline void gen_rldcr(DisasContext *ctx, int men)
2888 {
2889     uint32_t me;
2890 
2891     me = MB(ctx->opcode) | (men << 5);
2892     gen_rldnm(ctx, 0, me);
2893 }
2894 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2895 
2896 /* rldimi - rldimi. */
2897 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2898 {
2899     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2900     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2901     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2902     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2903     uint32_t me = 63 - sh;
2904 
2905     if (mb <= me) {
2906         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2907     } else {
2908         target_ulong mask = MASK(mb, me);
2909         TCGv t1 = tcg_temp_new();
2910 
2911         tcg_gen_rotli_tl(t1, t_rs, sh);
2912         tcg_gen_andi_tl(t1, t1, mask);
2913         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2914         tcg_gen_or_tl(t_ra, t_ra, t1);
2915         tcg_temp_free(t1);
2916     }
2917     if (unlikely(Rc(ctx->opcode) != 0)) {
2918         gen_set_Rc0(ctx, t_ra);
2919     }
2920 }
2921 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2922 #endif
2923 
2924 /***                             Integer shift                             ***/
2925 
2926 /* slw & slw. */
2927 static void gen_slw(DisasContext *ctx)
2928 {
2929     TCGv t0, t1;
2930 
2931     t0 = tcg_temp_new();
2932     /* AND rS with a mask that is 0 when rB >= 0x20 */
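         /* (bit 5 of rB is shifted into the sign position and sign-extended) */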
2933 #if defined(TARGET_PPC64)
2934     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2935     tcg_gen_sari_tl(t0, t0, 0x3f);
2936 #else
2937     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2938     tcg_gen_sari_tl(t0, t0, 0x1f);
2939 #endif
2940     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2941     t1 = tcg_temp_new();
2942     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2943     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2944     tcg_temp_free(t1);
2945     tcg_temp_free(t0);
2946     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2947     if (unlikely(Rc(ctx->opcode) != 0)) {
2948         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2949     }
2950 }
2951 
2952 /* sraw & sraw. */
2953 static void gen_sraw(DisasContext *ctx)
2954 {
2955     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2956                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2957     if (unlikely(Rc(ctx->opcode) != 0)) {
2958         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2959     }
2960 }
2961 
2962 /* srawi & srawi. */
2963 static void gen_srawi(DisasContext *ctx)
2964 {
2965     int sh = SH(ctx->opcode);
2966     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2967     TCGv src = cpu_gpr[rS(ctx->opcode)];
2968     if (sh == 0) {
2969         tcg_gen_ext32s_tl(dst, src);
2970         tcg_gen_movi_tl(cpu_ca, 0);
2971         if (is_isa300(ctx)) {
2972             tcg_gen_movi_tl(cpu_ca32, 0);
2973         }
2974     } else {
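             /*
              * CA is set when the source is negative and any 1 bits
              * are shifted out of the low sh bits.
              */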
2975         TCGv t0;
2976         tcg_gen_ext32s_tl(dst, src);
2977         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2978         t0 = tcg_temp_new();
2979         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2980         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2981         tcg_temp_free(t0);
2982         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2983         if (is_isa300(ctx)) {
2984             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2985         }
2986         tcg_gen_sari_tl(dst, dst, sh);
2987     }
2988     if (unlikely(Rc(ctx->opcode) != 0)) {
2989         gen_set_Rc0(ctx, dst);
2990     }
2991 }
2992 
2993 /* srw & srw. */
2994 static void gen_srw(DisasContext *ctx)
2995 {
2996     TCGv t0, t1;
2997 
2998     t0 = tcg_temp_new();
2999     /* AND rS with a mask that is 0 when rB >= 0x20 */
3000 #if defined(TARGET_PPC64)
3001     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3002     tcg_gen_sari_tl(t0, t0, 0x3f);
3003 #else
3004     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3005     tcg_gen_sari_tl(t0, t0, 0x1f);
3006 #endif
3007     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3008     tcg_gen_ext32u_tl(t0, t0);
3009     t1 = tcg_temp_new();
3010     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3011     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3012     tcg_temp_free(t1);
3013     tcg_temp_free(t0);
3014     if (unlikely(Rc(ctx->opcode) != 0)) {
3015         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3016     }
3017 }
3018 
3019 #if defined(TARGET_PPC64)
3020 /* sld & sld. */
3021 static void gen_sld(DisasContext *ctx)
3022 {
3023     TCGv t0, t1;
3024 
3025     t0 = tcg_temp_new();
3026     /* AND rS with a mask that is 0 when rB >= 0x40 */
3027     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3028     tcg_gen_sari_tl(t0, t0, 0x3f);
3029     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3030     t1 = tcg_temp_new();
3031     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3032     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3033     tcg_temp_free(t1);
3034     tcg_temp_free(t0);
3035     if (unlikely(Rc(ctx->opcode) != 0)) {
3036         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3037     }
3038 }
3039 
3040 /* srad & srad. */
3041 static void gen_srad(DisasContext *ctx)
3042 {
3043     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3044                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3045     if (unlikely(Rc(ctx->opcode) != 0)) {
3046         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3047     }
3048 }
3049 /* sradi & sradi. */
3050 static inline void gen_sradi(DisasContext *ctx, int n)
3051 {
3052     int sh = SH(ctx->opcode) + (n << 5);
3053     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3054     TCGv src = cpu_gpr[rS(ctx->opcode)];
3055     if (sh == 0) {
3056         tcg_gen_mov_tl(dst, src);
3057         tcg_gen_movi_tl(cpu_ca, 0);
3058         if (is_isa300(ctx)) {
3059             tcg_gen_movi_tl(cpu_ca32, 0);
3060         }
3061     } else {
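             /*
              * As for srawi: CA is set when the source is negative and
              * any 1 bits are shifted out.
              */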
3062         TCGv t0;
3063         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3064         t0 = tcg_temp_new();
3065         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3066         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3067         tcg_temp_free(t0);
3068         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3069         if (is_isa300(ctx)) {
3070             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3071         }
3072         tcg_gen_sari_tl(dst, src, sh);
3073     }
3074     if (unlikely(Rc(ctx->opcode) != 0)) {
3075         gen_set_Rc0(ctx, dst);
3076     }
3077 }
3078 
3079 static void gen_sradi0(DisasContext *ctx)
3080 {
3081     gen_sradi(ctx, 0);
3082 }
3083 
3084 static void gen_sradi1(DisasContext *ctx)
3085 {
3086     gen_sradi(ctx, 1);
3087 }
3088 
3089 /* extswsli & extswsli. */
3090 static inline void gen_extswsli(DisasContext *ctx, int n)
3091 {
3092     int sh = SH(ctx->opcode) + (n << 5);
3093     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3094     TCGv src = cpu_gpr[rS(ctx->opcode)];
3095 
3096     tcg_gen_ext32s_tl(dst, src);
3097     tcg_gen_shli_tl(dst, dst, sh);
3098     if (unlikely(Rc(ctx->opcode) != 0)) {
3099         gen_set_Rc0(ctx, dst);
3100     }
3101 }
3102 
3103 static void gen_extswsli0(DisasContext *ctx)
3104 {
3105     gen_extswsli(ctx, 0);
3106 }
3107 
3108 static void gen_extswsli1(DisasContext *ctx)
3109 {
3110     gen_extswsli(ctx, 1);
3111 }
3112 
3113 /* srd & srd. */
3114 static void gen_srd(DisasContext *ctx)
3115 {
3116     TCGv t0, t1;
3117 
3118     t0 = tcg_temp_new();
3119     /* AND rS with a mask that is 0 when rB >= 0x40 */
3120     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3121     tcg_gen_sari_tl(t0, t0, 0x3f);
3122     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3123     t1 = tcg_temp_new();
3124     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3125     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3126     tcg_temp_free(t1);
3127     tcg_temp_free(t0);
3128     if (unlikely(Rc(ctx->opcode) != 0)) {
3129         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3130     }
3131 }
3132 #endif
3133 
3134 /***                           Addressing modes                            ***/
3135 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
3136 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3137                                       target_long maskl)
3138 {
3139     target_long simm = SIMM(ctx->opcode);
3140 
3141     simm &= ~maskl;
3142     if (rA(ctx->opcode) == 0) {
3143         if (NARROW_MODE(ctx)) {
3144             simm = (uint32_t)simm;
3145         }
3146         tcg_gen_movi_tl(EA, simm);
3147     } else if (likely(simm != 0)) {
3148         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3149         if (NARROW_MODE(ctx)) {
3150             tcg_gen_ext32u_tl(EA, EA);
3151         }
3152     } else {
3153         if (NARROW_MODE(ctx)) {
3154             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3155         } else {
3156             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3157         }
3158     }
3159 }
3160 
3161 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3162 {
3163     if (rA(ctx->opcode) == 0) {
3164         if (NARROW_MODE(ctx)) {
3165             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3166         } else {
3167             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3168         }
3169     } else {
3170         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3171         if (NARROW_MODE(ctx)) {
3172             tcg_gen_ext32u_tl(EA, EA);
3173         }
3174     }
3175 }
3176 
3177 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3178 {
3179     if (rA(ctx->opcode) == 0) {
3180         tcg_gen_movi_tl(EA, 0);
3181     } else if (NARROW_MODE(ctx)) {
3182         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3183     } else {
3184         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3185     }
3186 }
3187 
3188 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3189                                 target_long val)
3190 {
3191     tcg_gen_addi_tl(ret, arg1, val);
3192     if (NARROW_MODE(ctx)) {
3193         tcg_gen_ext32u_tl(ret, ret);
3194     }
3195 }
3196 
3197 static inline void gen_align_no_le(DisasContext *ctx)
3198 {
3199     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3200                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3201 }
3202 
3203 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3204 {
3205     TCGv ea = tcg_temp_new();
3206     if (ra) {
3207         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3208     } else {
3209         tcg_gen_mov_tl(ea, displ);
3210     }
3211     if (NARROW_MODE(ctx)) {
3212         tcg_gen_ext32u_tl(ea, ea);
3213     }
3214     return ea;
3215 }
3216 
3217 /***                             Integer load                              ***/
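/*
 * DEF_MEMOP() ORs in the context's default byte-order mask so that the
 * access follows the current guest endianness; BSWAP_MEMOP() applies the
 * opposite byte order and is what the byte-reversed load/store forms
 * (lhbrx, lwbrx, ...) below are built on.
 */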
3218 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3219 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
3220 
3221 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
3222 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
3223                                   TCGv val,                             \
3224                                   TCGv addr)                            \
3225 {                                                                       \
3226     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
3227 }
3228 
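/*
 * Each instantiation below defines a thin wrapper around
 * tcg_gen_qemu_ld_tl(); e.g. GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
 * produces gen_qemu_ld8u(ctx, val, addr), which emits a zero-extending
 * byte load using the context's mem_idx.
 */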
3229 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
3230 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3231 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3232 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3233 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3234 
3235 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3236 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3237 
3238 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
3239 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
3240                                              TCGv_i64 val,          \
3241                                              TCGv addr)             \
3242 {                                                                   \
3243     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
3244 }
3245 
3246 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
3247 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3248 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3249 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3250 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
3251 
3252 #if defined(TARGET_PPC64)
3253 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
3254 #endif
3255 
3256 #define GEN_QEMU_STORE_TL(stop, op)                                     \
3257 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
3258                                   TCGv val,                             \
3259                                   TCGv addr)                            \
3260 {                                                                       \
3261     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
3262 }
3263 
3264 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
3265 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
3266 #endif
3267 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3268 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3269 
3270 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3271 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3272 
3273 #define GEN_QEMU_STORE_64(stop, op)                               \
3274 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
3275                                               TCGv_i64 val,       \
3276                                               TCGv addr)          \
3277 {                                                                 \
3278     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
3279 }
3280 
3281 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
3282 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3283 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3284 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
3285 
3286 #if defined(TARGET_PPC64)
3287 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
3288 #endif
3289 
3290 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
3291 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3292 {                                                                             \
3293     TCGv EA;                                                                  \
3294     chk(ctx);                                                                 \
3295     gen_set_access_type(ctx, ACCESS_INT);                                     \
3296     EA = tcg_temp_new();                                                      \
3297     gen_addr_reg_index(ctx, EA);                                              \
3298     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
3299     tcg_temp_free(EA);                                                        \
3300 }
3301 
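/*
 * A GEN_LDX_E(name, ...) instantiation defines gen_<name>x(): it runs the
 * access check 'chk', computes the indexed EA = (rA|0) + rB and loads into
 * rD with the gen_qemu_<ldop> helper generated above. GEN_LDX and
 * GEN_LDX_HVRM are shorthands passing CHK_NONE and CHK_HVRM as the check.
 */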
3302 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
3303     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3304 
3305 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
3306     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3307 
3308 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
3309 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3310 {                                                                             \
3311     TCGv EA;                                                                  \
3312     CHK_SV(ctx);                                                              \
3313     gen_set_access_type(ctx, ACCESS_INT);                                     \
3314     EA = tcg_temp_new();                                                      \
3315     gen_addr_reg_index(ctx, EA);                                              \
3316     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3317     tcg_temp_free(EA);                                                        \
3318 }
3319 
3320 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3321 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3322 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3323 #if defined(TARGET_PPC64)
3324 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
3325 #endif
3326 
3327 #if defined(TARGET_PPC64)
3328 /* CI load/store variants */
3329 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3330 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3331 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3332 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3333 #endif
3334 
3335 /***                              Integer store                            ***/
3336 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3337 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3338 {                                                                             \
3339     TCGv EA;                                                                  \
3340     chk(ctx);                                                                 \
3341     gen_set_access_type(ctx, ACCESS_INT);                                     \
3342     EA = tcg_temp_new();                                                      \
3343     gen_addr_reg_index(ctx, EA);                                              \
3344     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3345     tcg_temp_free(EA);                                                        \
3346 }
3347 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3348     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3349 
3350 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3351     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3352 
3353 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3354 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3355 {                                                                             \
3356     TCGv EA;                                                                  \
3357     CHK_SV(ctx);                                                              \
3358     gen_set_access_type(ctx, ACCESS_INT);                                     \
3359     EA = tcg_temp_new();                                                      \
3360     gen_addr_reg_index(ctx, EA);                                              \
3361     tcg_gen_qemu_st_tl(                                                       \
3362         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3363     tcg_temp_free(EA);                                                        \
3364 }
3365 
3366 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3367 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3368 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3369 #if defined(TARGET_PPC64)
3370 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3371 #endif
3372 
3373 #if defined(TARGET_PPC64)
3374 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3375 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3376 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3377 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3378 #endif
3379 /***                Integer load and store with byte reverse               ***/
3380 
3381 /* lhbrx */
3382 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3383 
3384 /* lwbrx */
3385 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3386 
3387 #if defined(TARGET_PPC64)
3388 /* ldbrx */
3389 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3390 /* stdbrx */
3391 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3392 #endif  /* TARGET_PPC64 */
3393 
3394 /* sthbrx */
3395 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3396 /* stwbrx */
3397 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3398 
3399 /***                    Integer load and store multiple                    ***/
3400 
3401 /* lmw */
3402 static void gen_lmw(DisasContext *ctx)
3403 {
3404     TCGv t0;
3405     TCGv_i32 t1;
3406 
3407     if (ctx->le_mode) {
3408         gen_align_no_le(ctx);
3409         return;
3410     }
3411     gen_set_access_type(ctx, ACCESS_INT);
3412     t0 = tcg_temp_new();
3413     t1 = tcg_const_i32(rD(ctx->opcode));
3414     gen_addr_imm_index(ctx, t0, 0);
3415     gen_helper_lmw(cpu_env, t0, t1);
3416     tcg_temp_free(t0);
3417     tcg_temp_free_i32(t1);
3418 }
3419 
3420 /* stmw */
3421 static void gen_stmw(DisasContext *ctx)
3422 {
3423     TCGv t0;
3424     TCGv_i32 t1;
3425 
3426     if (ctx->le_mode) {
3427         gen_align_no_le(ctx);
3428         return;
3429     }
3430     gen_set_access_type(ctx, ACCESS_INT);
3431     t0 = tcg_temp_new();
3432     t1 = tcg_const_i32(rS(ctx->opcode));
3433     gen_addr_imm_index(ctx, t0, 0);
3434     gen_helper_stmw(cpu_env, t0, t1);
3435     tcg_temp_free(t0);
3436     tcg_temp_free_i32(t1);
3437 }
3438 
3439 /***                    Integer load and store strings                     ***/
3440 
3441 /* lswi */
3442 /*
3443  * PowerPC32 specification says we must generate an exception if rA is
3444  * in the range of registers to be loaded.  On the other hand, IBM says
3445  * this is valid, but rA won't be loaded.  For now, I'll follow the
3446  * spec...
3447  */
3448 static void gen_lswi(DisasContext *ctx)
3449 {
3450     TCGv t0;
3451     TCGv_i32 t1, t2;
3452     int nb = NB(ctx->opcode);
3453     int start = rD(ctx->opcode);
3454     int ra = rA(ctx->opcode);
3455     int nr;
3456 
3457     if (ctx->le_mode) {
3458         gen_align_no_le(ctx);
3459         return;
3460     }
3461     if (nb == 0) {
3462         nb = 32;
3463     }
3464     nr = DIV_ROUND_UP(nb, 4);
3465     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3466         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3467         return;
3468     }
3469     gen_set_access_type(ctx, ACCESS_INT);
3470     t0 = tcg_temp_new();
3471     gen_addr_register(ctx, t0);
3472     t1 = tcg_const_i32(nb);
3473     t2 = tcg_const_i32(start);
3474     gen_helper_lsw(cpu_env, t0, t1, t2);
3475     tcg_temp_free(t0);
3476     tcg_temp_free_i32(t1);
3477     tcg_temp_free_i32(t2);
3478 }
3479 
3480 /* lswx */
3481 static void gen_lswx(DisasContext *ctx)
3482 {
3483     TCGv t0;
3484     TCGv_i32 t1, t2, t3;
3485 
3486     if (ctx->le_mode) {
3487         gen_align_no_le(ctx);
3488         return;
3489     }
3490     gen_set_access_type(ctx, ACCESS_INT);
3491     t0 = tcg_temp_new();
3492     gen_addr_reg_index(ctx, t0);
3493     t1 = tcg_const_i32(rD(ctx->opcode));
3494     t2 = tcg_const_i32(rA(ctx->opcode));
3495     t3 = tcg_const_i32(rB(ctx->opcode));
3496     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3497     tcg_temp_free(t0);
3498     tcg_temp_free_i32(t1);
3499     tcg_temp_free_i32(t2);
3500     tcg_temp_free_i32(t3);
3501 }
3502 
3503 /* stswi */
3504 static void gen_stswi(DisasContext *ctx)
3505 {
3506     TCGv t0;
3507     TCGv_i32 t1, t2;
3508     int nb = NB(ctx->opcode);
3509 
3510     if (ctx->le_mode) {
3511         gen_align_no_le(ctx);
3512         return;
3513     }
3514     gen_set_access_type(ctx, ACCESS_INT);
3515     t0 = tcg_temp_new();
3516     gen_addr_register(ctx, t0);
3517     if (nb == 0) {
3518         nb = 32;
3519     }
3520     t1 = tcg_const_i32(nb);
3521     t2 = tcg_const_i32(rS(ctx->opcode));
3522     gen_helper_stsw(cpu_env, t0, t1, t2);
3523     tcg_temp_free(t0);
3524     tcg_temp_free_i32(t1);
3525     tcg_temp_free_i32(t2);
3526 }
3527 
3528 /* stswx */
3529 static void gen_stswx(DisasContext *ctx)
3530 {
3531     TCGv t0;
3532     TCGv_i32 t1, t2;
3533 
3534     if (ctx->le_mode) {
3535         gen_align_no_le(ctx);
3536         return;
3537     }
3538     gen_set_access_type(ctx, ACCESS_INT);
3539     t0 = tcg_temp_new();
3540     gen_addr_reg_index(ctx, t0);
3541     t1 = tcg_temp_new_i32();
3542     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3543     tcg_gen_andi_i32(t1, t1, 0x7F);
3544     t2 = tcg_const_i32(rS(ctx->opcode));
3545     gen_helper_stsw(cpu_env, t0, t1, t2);
3546     tcg_temp_free(t0);
3547     tcg_temp_free_i32(t1);
3548     tcg_temp_free_i32(t2);
3549 }
3550 
3551 /***                        Memory synchronisation                         ***/
3552 /* eieio */
3553 static void gen_eieio(DisasContext *ctx)
3554 {
3555     TCGBar bar = TCG_MO_ALL;
3556 
3557     /*
3558      * eieio has complex semantics. It provides memory ordering between
3559      * operations in the set:
3560      * - loads from CI memory.
3561      * - stores to CI memory.
3562      * - stores to WT memory.
3563      *
3564      * It separately also orders memory for operations in the set:
3565      * - stores to cacheable memory.
3566      *
3567      * It also serializes instructions:
3568      * - dcbt and dcbst.
3569      *
3570      * It separately serializes:
3571      * - tlbie and tlbsync.
3572      *
3573      * And separately serializes:
3574      * - slbieg, slbiag, and slbsync.
3575      *
3576      * The end result is that CI memory ordering requires TCG_MO_ALL
3577      * and it is not possible to special-case more relaxed ordering for
3578      * cacheable accesses. TCG_BAR_SC is required to provide this
3579      * serialization.
3580      */
3581 
3582     /*
3583      * POWER9 has an eieio instruction variant using bit 6 as a hint to
3584      * tell the CPU it is a store-forwarding barrier.
3585      */
3586     if (ctx->opcode & 0x2000000) {
3587         /*
3588          * ISA says that "Reserved fields in instructions are ignored
3589          * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
3590          * since this is not an instruction software should be using,
3591          * complain to the user.
3592          */
3593         if (!(ctx->insns_flags2 & PPC2_ISA300)) {
3594             qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
3595                           TARGET_FMT_lx "\n", ctx->cia);
3596         } else {
3597             bar = TCG_MO_ST_LD;
3598         }
3599     }
3600 
3601     tcg_gen_mb(bar | TCG_BAR_SC);
3602 }
3603 
3604 #if !defined(CONFIG_USER_ONLY)
3605 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3606 {
3607     TCGv_i32 t;
3608     TCGLabel *l;
3609 
3610     if (!ctx->lazy_tlb_flush) {
3611         return;
3612     }
3613     l = gen_new_label();
3614     t = tcg_temp_new_i32();
3615     tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3616     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3617     if (global) {
3618         gen_helper_check_tlb_flush_global(cpu_env);
3619     } else {
3620         gen_helper_check_tlb_flush_local(cpu_env);
3621     }
3622     gen_set_label(l);
3623     tcg_temp_free_i32(t);
3624 }
3625 #else
3626 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3627 #endif
3628 
3629 /* isync */
3630 static void gen_isync(DisasContext *ctx)
3631 {
3632     /*
3633      * We need to check for a pending TLB flush. This can only happen in
3634      * kernel mode, so check MSR_PR
3635      */
3636     if (!ctx->pr) {
3637         gen_check_tlb_flush(ctx, false);
3638     }
3639     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
3640     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
3641 }
3642 
3643 #define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
3644 
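/*
 * Load-and-reserve (lbarx/lharx/lwarx/ldarx): do an aligned load and record
 * both the effective address (cpu_reserve) and the loaded value
 * (cpu_reserve_val), so that a later store-conditional can compare-and-swap
 * against them in gen_conditional_store() below.
 */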
3645 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3646 {
3647     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3648     TCGv t0 = tcg_temp_new();
3649 
3650     gen_set_access_type(ctx, ACCESS_RES);
3651     gen_addr_reg_index(ctx, t0);
3652     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3653     tcg_gen_mov_tl(cpu_reserve, t0);
3654     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3655     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3656     tcg_temp_free(t0);
3657 }
3658 
3659 #define LARX(name, memop)                  \
3660 static void gen_##name(DisasContext *ctx)  \
3661 {                                          \
3662     gen_load_locked(ctx, memop);           \
3663 }
3664 
3665 /* lwarx */
3666 LARX(lbarx, DEF_MEMOP(MO_UB))
3667 LARX(lharx, DEF_MEMOP(MO_UW))
3668 LARX(lwarx, DEF_MEMOP(MO_UL))
3669 
3670 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3671                                       TCGv EA, TCGCond cond, int addend)
3672 {
3673     TCGv t = tcg_temp_new();
3674     TCGv t2 = tcg_temp_new();
3675     TCGv u = tcg_temp_new();
3676 
3677     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3678     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3679     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3680     tcg_gen_addi_tl(u, t, addend);
3681 
3682     /* E.g. for fetch and increment bounded... */
3683     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3684     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3685     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3686 
3687     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3688     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3689     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3690 
3691     tcg_temp_free(t);
3692     tcg_temp_free(t2);
3693     tcg_temp_free(u);
3694 }
3695 
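/*
 * lwat/ldat: the FC field selects the atomic read-modify-write operation.
 * The value fetched from storage goes to RT, the source operand comes from
 * RT+1, and the compare-and-swap-not-equal form (FC=16) additionally uses
 * RT+2 as the replacement value.
 */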
3696 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
3697 {
3698     uint32_t gpr_FC = FC(ctx->opcode);
3699     TCGv EA = tcg_temp_new();
3700     int rt = rD(ctx->opcode);
3701     bool need_serial;
3702     TCGv src, dst;
3703 
3704     gen_addr_register(ctx, EA);
3705     dst = cpu_gpr[rt];
3706     src = cpu_gpr[(rt + 1) & 31];
3707 
3708     need_serial = false;
3709     memop |= MO_ALIGN;
3710     switch (gpr_FC) {
3711     case 0: /* Fetch and add */
3712         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3713         break;
3714     case 1: /* Fetch and xor */
3715         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3716         break;
3717     case 2: /* Fetch and or */
3718         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3719         break;
3720     case 3: /* Fetch and 'and' */
3721         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3722         break;
3723     case 4:  /* Fetch and max unsigned */
3724         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3725         break;
3726     case 5:  /* Fetch and max signed */
3727         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3728         break;
3729     case 6:  /* Fetch and min unsigned */
3730         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3731         break;
3732     case 7:  /* Fetch and min signed */
3733         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3734         break;
3735     case 8: /* Swap */
3736         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3737         break;
3738 
3739     case 16: /* Compare and swap not equal */
3740         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3741             need_serial = true;
3742         } else {
3743             TCGv t0 = tcg_temp_new();
3744             TCGv t1 = tcg_temp_new();
3745 
3746             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3747             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3748                 tcg_gen_mov_tl(t1, src);
3749             } else {
3750                 tcg_gen_ext32u_tl(t1, src);
3751             }
3752             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3753                                cpu_gpr[(rt + 2) & 31], t0);
3754             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3755             tcg_gen_mov_tl(dst, t0);
3756 
3757             tcg_temp_free(t0);
3758             tcg_temp_free(t1);
3759         }
3760         break;
3761 
3762     case 24: /* Fetch and increment bounded */
3763         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3764             need_serial = true;
3765         } else {
3766             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3767         }
3768         break;
3769     case 25: /* Fetch and increment equal */
3770         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3771             need_serial = true;
3772         } else {
3773             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3774         }
3775         break;
3776     case 28: /* Fetch and decrement bounded */
3777         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3778             need_serial = true;
3779         } else {
3780             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3781         }
3782         break;
3783 
3784     default:
3785         /* invoke data storage error handler */
3786         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3787     }
3788     tcg_temp_free(EA);
3789 
3790     if (need_serial) {
3791         /* Restart with exclusive lock.  */
3792         gen_helper_exit_atomic(cpu_env);
3793         ctx->base.is_jmp = DISAS_NORETURN;
3794     }
3795 }
3796 
3797 static void gen_lwat(DisasContext *ctx)
3798 {
3799     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3800 }
3801 
3802 #ifdef TARGET_PPC64
3803 static void gen_ldat(DisasContext *ctx)
3804 {
3805     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3806 }
3807 #endif
3808 
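/*
 * stwat/stdat: same FC-based dispatch as the atomic loads above, but the
 * source operand comes from the RS field and the value returned by the
 * atomic op is discarded.
 */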
3809 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3810 {
3811     uint32_t gpr_FC = FC(ctx->opcode);
3812     TCGv EA = tcg_temp_new();
3813     TCGv src, discard;
3814 
3815     gen_addr_register(ctx, EA);
3816     src = cpu_gpr[rD(ctx->opcode)];
3817     discard = tcg_temp_new();
3818 
3819     memop |= MO_ALIGN;
3820     switch (gpr_FC) {
3821     case 0: /* add and Store */
3822         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3823         break;
3824     case 1: /* xor and Store */
3825         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3826         break;
3827     case 2: /* Or and Store */
3828         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3829         break;
3830     case 3: /* 'and' and Store */
3831         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3832         break;
3833     case 4:  /* Store max unsigned */
3834         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3835         break;
3836     case 5:  /* Store max signed */
3837         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3838         break;
3839     case 6:  /* Store min unsigned */
3840         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3841         break;
3842     case 7:  /* Store min signed */
3843         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3844         break;
3845     case 24: /* Store twin  */
3846         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3847             /* Restart with exclusive lock.  */
3848             gen_helper_exit_atomic(cpu_env);
3849             ctx->base.is_jmp = DISAS_NORETURN;
3850         } else {
3851             TCGv t = tcg_temp_new();
3852             TCGv t2 = tcg_temp_new();
3853             TCGv s = tcg_temp_new();
3854             TCGv s2 = tcg_temp_new();
3855             TCGv ea_plus_s = tcg_temp_new();
3856 
3857             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3858             tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
3859             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3860             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3861             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3862             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3863             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3864 
3865             tcg_temp_free(ea_plus_s);
3866             tcg_temp_free(s2);
3867             tcg_temp_free(s);
3868             tcg_temp_free(t2);
3869             tcg_temp_free(t);
3870         }
3871         break;
3872     default:
3873         /* invoke data storage error handler */
3874         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3875     }
3876     tcg_temp_free(discard);
3877     tcg_temp_free(EA);
3878 }
3879 
3880 static void gen_stwat(DisasContext *ctx)
3881 {
3882     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3883 }
3884 
3885 #ifdef TARGET_PPC64
3886 static void gen_stdat(DisasContext *ctx)
3887 {
3888     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3889 }
3890 #endif
3891 
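/*
 * Store-conditional (stbcx./sthcx./stwcx./stdcx.): fail straight away if
 * the computed EA does not match cpu_reserve, otherwise attempt an atomic
 * cmpxchg against cpu_reserve_val. CR0.EQ reflects success and XER.SO is
 * copied in; the reservation is cleared (set to -1) on both paths.
 */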
3892 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3893 {
3894     TCGLabel *l1 = gen_new_label();
3895     TCGLabel *l2 = gen_new_label();
3896     TCGv t0 = tcg_temp_new();
3897     int reg = rS(ctx->opcode);
3898 
3899     gen_set_access_type(ctx, ACCESS_RES);
3900     gen_addr_reg_index(ctx, t0);
3901     tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3902     tcg_temp_free(t0);
3903 
3904     t0 = tcg_temp_new();
3905     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3906                               cpu_gpr[reg], ctx->mem_idx,
3907                               DEF_MEMOP(memop) | MO_ALIGN);
3908     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3909     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3910     tcg_gen_or_tl(t0, t0, cpu_so);
3911     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3912     tcg_temp_free(t0);
3913     tcg_gen_br(l2);
3914 
3915     gen_set_label(l1);
3916 
3917     /*
3918      * Address mismatch implies failure.  But we still need to provide
3919      * the memory barrier semantics of the instruction.
3920      */
3921     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3922     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3923 
3924     gen_set_label(l2);
3925     tcg_gen_movi_tl(cpu_reserve, -1);
3926 }
3927 
3928 #define STCX(name, memop)                  \
3929 static void gen_##name(DisasContext *ctx)  \
3930 {                                          \
3931     gen_conditional_store(ctx, memop);     \
3932 }
3933 
3934 STCX(stbcx_, DEF_MEMOP(MO_UB))
3935 STCX(sthcx_, DEF_MEMOP(MO_UW))
3936 STCX(stwcx_, DEF_MEMOP(MO_UL))
3937 
3938 #if defined(TARGET_PPC64)
3939 /* ldarx */
3940 LARX(ldarx, DEF_MEMOP(MO_UQ))
3941 /* stdcx. */
3942 STCX(stdcx_, DEF_MEMOP(MO_UQ))
3943 
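/*
 * lqarx/stqcx. extend the reservation scheme to quadwords: lqarx saves the
 * two 64-bit halves in reserve_val/reserve_val2 and stqcx. checks them with
 * a 128-bit cmpxchg.
 */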
3944 /* lqarx */
3945 static void gen_lqarx(DisasContext *ctx)
3946 {
3947     int rd = rD(ctx->opcode);
3948     TCGv EA, hi, lo;
3949 
3950     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3951                  (rd == rB(ctx->opcode)))) {
3952         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3953         return;
3954     }
3955 
3956     gen_set_access_type(ctx, ACCESS_RES);
3957     EA = tcg_temp_new();
3958     gen_addr_reg_index(ctx, EA);
3959 
3960     /* Note that the low part is always in RD+1, even in LE mode.  */
3961     lo = cpu_gpr[rd + 1];
3962     hi = cpu_gpr[rd];
3963 
3964     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3965         if (HAVE_ATOMIC128) {
3966             TCGv_i32 oi = tcg_temp_new_i32();
3967             if (ctx->le_mode) {
3968                 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
3969                                                     ctx->mem_idx));
3970                 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3971             } else {
3972                 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
3973                                                     ctx->mem_idx));
3974                 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3975             }
3976             tcg_temp_free_i32(oi);
3977             tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3978         } else {
3979             /* Restart with exclusive lock.  */
3980             gen_helper_exit_atomic(cpu_env);
3981             ctx->base.is_jmp = DISAS_NORETURN;
3982             tcg_temp_free(EA);
3983             return;
3984         }
3985     } else if (ctx->le_mode) {
3986         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
3987         tcg_gen_mov_tl(cpu_reserve, EA);
3988         gen_addr_add(ctx, EA, EA, 8);
3989         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
3990     } else {
3991         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
3992         tcg_gen_mov_tl(cpu_reserve, EA);
3993         gen_addr_add(ctx, EA, EA, 8);
3994         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
3995     }
3996     tcg_temp_free(EA);
3997 
3998     tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
3999     tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
4000 }
4001 
4002 /* stqcx. */
4003 static void gen_stqcx_(DisasContext *ctx)
4004 {
4005     TCGLabel *lab_fail, *lab_over;
4006     int rs = rS(ctx->opcode);
4007     TCGv EA, t0, t1;
4008     TCGv_i128 cmp, val;
4009 
4010     if (unlikely(rs & 1)) {
4011         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4012         return;
4013     }
4014 
4015     lab_fail = gen_new_label();
4016     lab_over = gen_new_label();
4017 
4018     gen_set_access_type(ctx, ACCESS_RES);
4019     EA = tcg_temp_new();
4020     gen_addr_reg_index(ctx, EA);
4021 
4022     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
4023     tcg_temp_free(EA);
4024 
4025     cmp = tcg_temp_new_i128();
4026     val = tcg_temp_new_i128();
4027 
4028     tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
4029 
4030     /* Note that the low part is always in RS+1, even in LE mode.  */
4031     tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
4032 
4033     tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
4034                                 DEF_MEMOP(MO_128 | MO_ALIGN));
4035     tcg_temp_free_i128(cmp);
4036 
4037     t0 = tcg_temp_new();
4038     t1 = tcg_temp_new();
4039     tcg_gen_extr_i128_i64(t1, t0, val);
4040     tcg_temp_free_i128(val);
4041 
4042     tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
4043     tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
4044     tcg_gen_or_tl(t0, t0, t1);
4045     tcg_temp_free(t1);
4046 
4047     tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
4048     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
4049     tcg_gen_or_tl(t0, t0, cpu_so);
4050     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
4051     tcg_temp_free(t0);
4052 
4053     tcg_gen_br(lab_over);
4054     gen_set_label(lab_fail);
4055 
4056     /*
4057      * Address mismatch implies failure.  But we still need to provide
4058      * the memory barrier semantics of the instruction.
4059      */
4060     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
4061     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4062 
4063     gen_set_label(lab_over);
4064     tcg_gen_movi_tl(cpu_reserve, -1);
4065 }
4066 #endif /* defined(TARGET_PPC64) */
4067 
4068 /* sync */
4069 static void gen_sync(DisasContext *ctx)
4070 {
4071     TCGBar bar = TCG_MO_ALL;
4072     uint32_t l = (ctx->opcode >> 21) & 3;
4073 
4074     if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
4075         bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
4076     }
4077 
4078     /*
4079      * We may need to check for a pending TLB flush.
4080      *
4081      * We do this on ptesync (l == 2) on ppc64 and any sync pn ppc32.
4082      *
4083      * Additionally, this can only happen in kernel mode however so
4084      * check MSR_PR as well.
4085      */
4086     if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4087         gen_check_tlb_flush(ctx, true);
4088     }
4089 
4090     tcg_gen_mb(bar | TCG_BAR_SC);
4091 }
4092 
4093 /* wait */
4094 static void gen_wait(DisasContext *ctx)
4095 {
4096     uint32_t wc;
4097 
4098     if (ctx->insns_flags & PPC_WAIT) {
4099         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
4100 
4101         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
4102             /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
4103             wc = WC(ctx->opcode);
4104         } else {
4105             wc = 0;
4106         }
4107 
4108     } else if (ctx->insns_flags2 & PPC2_ISA300) {
4109         /* v3.0 defines a new 'wait' encoding. */
4110         wc = WC(ctx->opcode);
4111         if (ctx->insns_flags2 & PPC2_ISA310) {
4112             uint32_t pl = PL(ctx->opcode);
4113 
4114             /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
4115             if (wc == 3) {
4116                 gen_invalid(ctx);
4117                 return;
4118             }
4119 
4120             /* PL 1-3 are reserved. If WC=2 then the insn is treated as noop. */
4121             if (pl > 0 && wc != 2) {
4122                 gen_invalid(ctx);
4123                 return;
4124             }
4125 
4126         } else { /* ISA300 */
4127             /* WC 1-3 are reserved */
4128             if (wc > 0) {
4129                 gen_invalid(ctx);
4130                 return;
4131             }
4132         }
4133 
4134     } else {
4135         warn_report("wait instruction decoded with wrong ISA flags.");
4136         gen_invalid(ctx);
4137         return;
4138     }
4139 
4140     /*
4141      * wait without WC field or with WC=0 waits for an exception / interrupt
4142      * to occur.
4143      */
4144     if (wc == 0) {
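        /*
         * Set CPUState::halted; the negative offsetof() addresses it
         * relative to the architectural env embedded in PowerPCCPU.
         */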
4145         TCGv_i32 t0 = tcg_const_i32(1);
4146         tcg_gen_st_i32(t0, cpu_env,
4147                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4148         tcg_temp_free_i32(t0);
4149         /* Stop translation, as the CPU is supposed to sleep from now */
4150         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4151     }
4152 
4153     /*
4154      * Other wait types must not just wait until an exception occurs because
4155      * ignoring their other wake-up conditions could cause a hang.
4156      *
4157      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
4158      * no-ops.
4159      *
4160      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
4161      *
4162      * wc=2 waits for an implementation-specific condition, which could
4163      * always be true, so it can be implemented as a no-op.
4164      *
4165      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
4166      *
4167      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
4168      * Reservation-loss may have implementation-specific conditions, so it
4169      * can be implemented as a no-op.
4170      *
4171      * wc=2 waits for an exception or an amount of time to pass. This
4172      * amount is implementation-specific so it can be implemented as a
4173      * no-op.
4174      *
4175      * ISA v3.1 allows for execution to resume "in the rare case of
4176      * an implementation-dependent event", so in any case software must
4177      * not depend on the architected resumption condition to become
4178      * true, so no-op implementations should be architecturally correct
4179      * (if suboptimal).
4180      */
4181 }
4182 
4183 #if defined(TARGET_PPC64)
4184 static void gen_doze(DisasContext *ctx)
4185 {
4186 #if defined(CONFIG_USER_ONLY)
4187     GEN_PRIV(ctx);
4188 #else
4189     TCGv_i32 t;
4190 
4191     CHK_HV(ctx);
4192     t = tcg_const_i32(PPC_PM_DOZE);
4193     gen_helper_pminsn(cpu_env, t);
4194     tcg_temp_free_i32(t);
4195     /* Stop translation, as the CPU is supposed to sleep from now */
4196     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4197 #endif /* defined(CONFIG_USER_ONLY) */
4198 }
4199 
4200 static void gen_nap(DisasContext *ctx)
4201 {
4202 #if defined(CONFIG_USER_ONLY)
4203     GEN_PRIV(ctx);
4204 #else
4205     TCGv_i32 t;
4206 
4207     CHK_HV(ctx);
4208     t = tcg_const_i32(PPC_PM_NAP);
4209     gen_helper_pminsn(cpu_env, t);
4210     tcg_temp_free_i32(t);
4211     /* Stop translation, as the CPU is supposed to sleep from now */
4212     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4213 #endif /* defined(CONFIG_USER_ONLY) */
4214 }
4215 
4216 static void gen_stop(DisasContext *ctx)
4217 {
4218 #if defined(CONFIG_USER_ONLY)
4219     GEN_PRIV(ctx);
4220 #else
4221     TCGv_i32 t;
4222 
4223     CHK_HV(ctx);
4224     t = tcg_const_i32(PPC_PM_STOP);
4225     gen_helper_pminsn(cpu_env, t);
4226     tcg_temp_free_i32(t);
4227     /* Stop translation, as the CPU is supposed to sleep from now */
4228     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4229 #endif /* defined(CONFIG_USER_ONLY) */
4230 }
4231 
4232 static void gen_sleep(DisasContext *ctx)
4233 {
4234 #if defined(CONFIG_USER_ONLY)
4235     GEN_PRIV(ctx);
4236 #else
4237     TCGv_i32 t;
4238 
4239     CHK_HV(ctx);
4240     t = tcg_const_i32(PPC_PM_SLEEP);
4241     gen_helper_pminsn(cpu_env, t);
4242     tcg_temp_free_i32(t);
4243     /* Stop translation, as the CPU is supposed to sleep from now */
4244     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4245 #endif /* defined(CONFIG_USER_ONLY) */
4246 }
4247 
4248 static void gen_rvwinkle(DisasContext *ctx)
4249 {
4250 #if defined(CONFIG_USER_ONLY)
4251     GEN_PRIV(ctx);
4252 #else
4253     TCGv_i32 t;
4254 
4255     CHK_HV(ctx);
4256     t = tcg_const_i32(PPC_PM_RVWINKLE);
4257     gen_helper_pminsn(cpu_env, t);
4258     tcg_temp_free_i32(t);
4259     /* Stop translation, as the CPU is supposed to sleep from now */
4260     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4261 #endif /* defined(CONFIG_USER_ONLY) */
4262 }
4263 #endif /* #if defined(TARGET_PPC64) */
4264 
4265 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4266 {
4267 #if defined(TARGET_PPC64)
4268     if (ctx->has_cfar) {
4269         tcg_gen_movi_tl(cpu_cfar, nip);
4270     }
4271 #endif
4272 }
4273 
4274 #if defined(TARGET_PPC64)
4275 static void pmu_count_insns(DisasContext *ctx)
4276 {
4277     /*
4278      * Do not bother calling the helper if the PMU isn't counting
4279      * instructions.
4280      */
4281     if (!ctx->pmu_insn_cnt) {
4282         return;
4283     }
4284 
4285  #if !defined(CONFIG_USER_ONLY)
4286     TCGLabel *l;
4287     TCGv t0;
4288 
4289     /*
4290      * The PMU insns_inc() helper stops the internal PMU timer if a
4291      * counter overflow happens. In that case, if the guest is
4292      * running with icount and we do not handle it beforehand,
4293      * the helper can trigger a 'bad icount read'.
4294      */
4295     gen_icount_io_start(ctx);
4296 
4297     /* Avoid helper calls when only PMC5-6 are enabled. */
4298     if (!ctx->pmc_other) {
4299         l = gen_new_label();
4300         t0 = tcg_temp_new();
4301 
4302         gen_load_spr(t0, SPR_POWER_PMC5);
4303         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4304         gen_store_spr(SPR_POWER_PMC5, t0);
4305         /* Check for overflow, if it's enabled */
4306         if (ctx->mmcr0_pmcjce) {
4307             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
4308             gen_helper_handle_pmc5_overflow(cpu_env);
4309         }
4310 
4311         gen_set_label(l);
4312         tcg_temp_free(t0);
4313     } else {
4314         gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
4315     }
4316   #else
4317     /*
4318      * User mode can read (but not write) PMC5 and start/stop
4319      * the PMU via MMCR0_FC. In this case just increment
4320      * PMC5 with base.num_insns.
4321      */
4322     TCGv t0 = tcg_temp_new();
4323 
4324     gen_load_spr(t0, SPR_POWER_PMC5);
4325     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4326     gen_store_spr(SPR_POWER_PMC5, t0);
4327 
4328     tcg_temp_free(t0);
4329   #endif /* #if !defined(CONFIG_USER_ONLY) */
4330 }
4331 #else
4332 static void pmu_count_insns(DisasContext *ctx)
4333 {
4334     return;
4335 }
4336 #endif /* #if defined(TARGET_PPC64) */
4337 
4338 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4339 {
4340     return translator_use_goto_tb(&ctx->base, dest);
4341 }
4342 
4343 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4344 {
4345     if (unlikely(ctx->singlestep_enabled)) {
4346         gen_debug_exception(ctx);
4347     } else {
4348         /*
4349          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4350          * CF_NO_GOTO_PTR is set. Count insns now.
4351          */
4352         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4353             pmu_count_insns(ctx);
4354         }
4355 
4356         tcg_gen_lookup_and_goto_ptr();
4357     }
4358 }
4359 
4360 /***                                Branch                                 ***/
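/*
 * gen_goto_tb() chains directly to the destination TB when
 * translator_use_goto_tb() allows it (counting PMU instructions first);
 * otherwise it updates NIP and falls back to an indirect jump via
 * gen_lookup_and_goto_ptr().
 */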
4361 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4362 {
4363     if (NARROW_MODE(ctx)) {
4364         dest = (uint32_t) dest;
4365     }
4366     if (use_goto_tb(ctx, dest)) {
4367         pmu_count_insns(ctx);
4368         tcg_gen_goto_tb(n);
4369         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4370         tcg_gen_exit_tb(ctx->base.tb, n);
4371     } else {
4372         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4373         gen_lookup_and_goto_ptr(ctx);
4374     }
4375 }
4376 
4377 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4378 {
4379     if (NARROW_MODE(ctx)) {
4380         nip = (uint32_t)nip;
4381     }
4382     tcg_gen_movi_tl(cpu_lr, nip);
4383 }
4384 
4385 /* b ba bl bla */
4386 static void gen_b(DisasContext *ctx)
4387 {
4388     target_ulong li, target;
4389 
4390     /* sign extend LI */
4391     li = LI(ctx->opcode);
4392     li = (li ^ 0x02000000) - 0x02000000;
4393     if (likely(AA(ctx->opcode) == 0)) {
4394         target = ctx->cia + li;
4395     } else {
4396         target = li;
4397     }
4398     if (LK(ctx->opcode)) {
4399         gen_setlr(ctx, ctx->base.pc_next);
4400     }
4401     gen_update_cfar(ctx, ctx->cia);
4402     gen_goto_tb(ctx, 0, target);
4403     ctx->base.is_jmp = DISAS_NORETURN;
4404 }
4405 
4406 #define BCOND_IM  0
4407 #define BCOND_LR  1
4408 #define BCOND_CTR 2
4409 #define BCOND_TAR 3
4410 
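/*
 * Conditional branches: BO bit 0x4 clear means "decrement CTR and test it",
 * with bit 0x2 selecting whether the branch is taken on CTR == 0 or
 * CTR != 0; BO bit 0x10 clear means "test the CR bit selected by BI", with
 * bit 0x8 selecting whether that bit must be set or clear.
 */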
4411 static void gen_bcond(DisasContext *ctx, int type)
4412 {
4413     uint32_t bo = BO(ctx->opcode);
4414     TCGLabel *l1;
4415     TCGv target;
4416 
4417     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4418         target = tcg_temp_new();
4419         if (type == BCOND_CTR) {
4420             tcg_gen_mov_tl(target, cpu_ctr);
4421         } else if (type == BCOND_TAR) {
4422             gen_load_spr(target, SPR_TAR);
4423         } else {
4424             tcg_gen_mov_tl(target, cpu_lr);
4425         }
4426     } else {
4427         target = NULL;
4428     }
4429     if (LK(ctx->opcode)) {
4430         gen_setlr(ctx, ctx->base.pc_next);
4431     }
4432     l1 = gen_new_label();
4433     if ((bo & 0x4) == 0) {
4434         /* Decrement and test CTR */
4435         TCGv temp = tcg_temp_new();
4436 
4437         if (type == BCOND_CTR) {
4438             /*
4439              * All ISAs up to v3 describe this form of bcctr as invalid but
4440              * some processors, i.e. 64-bit server processors compliant with
4441              * arch 2.x, do implement a "test and decrement" logic instead,
4442              * as described in their respective UMs. This logic makes CTR
4443              * act as both the branch target and a counter, which makes
4444              * it basically useless and thus never used in real code.
4445              *
4446              * This form was hence chosen to trigger the extra micro-architectural
4447              * side-effect on real HW needed for the Spectre v2 workaround.
4448              * It is up to guests that implement such a workaround, i.e. Linux,
4449              * to use this form in a way that just triggers the side-effect
4450              * without doing anything else harmful.
4451              */
4452             if (unlikely(!is_book3s_arch2x(ctx))) {
4453                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4454                 tcg_temp_free(temp);
4455                 tcg_temp_free(target);
4456                 return;
4457             }
4458 
4459             if (NARROW_MODE(ctx)) {
4460                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4461             } else {
4462                 tcg_gen_mov_tl(temp, cpu_ctr);
4463             }
4464             if (bo & 0x2) {
4465                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4466             } else {
4467                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4468             }
4469             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4470         } else {
4471             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4472             if (NARROW_MODE(ctx)) {
4473                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4474             } else {
4475                 tcg_gen_mov_tl(temp, cpu_ctr);
4476             }
4477             if (bo & 0x2) {
4478                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4479             } else {
4480                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4481             }
4482         }
4483         tcg_temp_free(temp);
4484     }
4485     if ((bo & 0x10) == 0) {
4486         /* Test CR */
4487         uint32_t bi = BI(ctx->opcode);
4488         uint32_t mask = 0x08 >> (bi & 0x03);
4489         TCGv_i32 temp = tcg_temp_new_i32();
4490 
4491         if (bo & 0x8) {
4492             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4493             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4494         } else {
4495             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4496             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4497         }
4498         tcg_temp_free_i32(temp);
4499     }
4500     gen_update_cfar(ctx, ctx->cia);
4501     if (type == BCOND_IM) {
4502         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4503         if (likely(AA(ctx->opcode) == 0)) {
4504             gen_goto_tb(ctx, 0, ctx->cia + li);
4505         } else {
4506             gen_goto_tb(ctx, 0, li);
4507         }
4508     } else {
4509         if (NARROW_MODE(ctx)) {
4510             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4511         } else {
4512             tcg_gen_andi_tl(cpu_nip, target, ~3);
4513         }
4514         gen_lookup_and_goto_ptr(ctx);
4515         tcg_temp_free(target);
4516     }
4517     if ((bo & 0x14) != 0x14) {
4518         /* fallthrough case */
4519         gen_set_label(l1);
4520         gen_goto_tb(ctx, 1, ctx->base.pc_next);
4521     }
4522     ctx->base.is_jmp = DISAS_NORETURN;
4523 }
4524 
4525 static void gen_bc(DisasContext *ctx)
4526 {
4527     gen_bcond(ctx, BCOND_IM);
4528 }
4529 
4530 static void gen_bcctr(DisasContext *ctx)
4531 {
4532     gen_bcond(ctx, BCOND_CTR);
4533 }
4534 
4535 static void gen_bclr(DisasContext *ctx)
4536 {
4537     gen_bcond(ctx, BCOND_LR);
4538 }
4539 
4540 static void gen_bctar(DisasContext *ctx)
4541 {
4542     gen_bcond(ctx, BCOND_TAR);
4543 }
4544 
4545 /***                      Condition register logical                       ***/
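/*
 * Each CR field is kept as a separate 4-bit value in cpu_crf[]. GEN_CRLOGIC()
 * shifts the crbA and crbB source bits into the bit position of crbD,
 * applies the TCG operation, masks out the single result bit and merges it
 * back into the destination field.
 */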
4546 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
4547 static void glue(gen_, name)(DisasContext *ctx)                               \
4548 {                                                                             \
4549     uint8_t bitmask;                                                          \
4550     int sh;                                                                   \
4551     TCGv_i32 t0, t1;                                                          \
4552     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
4553     t0 = tcg_temp_new_i32();                                                  \
4554     if (sh > 0)                                                               \
4555         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
4556     else if (sh < 0)                                                          \
4557         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
4558     else                                                                      \
4559         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
4560     t1 = tcg_temp_new_i32();                                                  \
4561     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
4562     if (sh > 0)                                                               \
4563         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
4564     else if (sh < 0)                                                          \
4565         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
4566     else                                                                      \
4567         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
4568     tcg_op(t0, t0, t1);                                                       \
4569     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
4570     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
4571     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
4572     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
4573     tcg_temp_free_i32(t0);                                                    \
4574     tcg_temp_free_i32(t1);                                                    \
4575 }
4576 
4577 /* crand */
4578 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4579 /* crandc */
4580 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4581 /* creqv */
4582 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4583 /* crnand */
4584 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4585 /* crnor */
4586 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4587 /* cror */
4588 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4589 /* crorc */
4590 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4591 /* crxor */
4592 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4593 
4594 /* mcrf */
4595 static void gen_mcrf(DisasContext *ctx)
4596 {
4597     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4598 }
4599 
4600 /***                           System linkage                              ***/
4601 
4602 /* rfi (supervisor only) */
4603 static void gen_rfi(DisasContext *ctx)
4604 {
4605 #if defined(CONFIG_USER_ONLY)
4606     GEN_PRIV(ctx);
4607 #else
4608     /*
4609      * This instruction doesn't exist anymore on 64-bit server
4610      * processors compliant with arch 2.x
4611      */
4612     if (is_book3s_arch2x(ctx)) {
4613         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4614         return;
4615     }
4616     /* Restore CPU state */
4617     CHK_SV(ctx);
4618     gen_icount_io_start(ctx);
4619     gen_update_cfar(ctx, ctx->cia);
4620     gen_helper_rfi(cpu_env);
4621     ctx->base.is_jmp = DISAS_EXIT;
4622 #endif
4623 }
4624 
4625 #if defined(TARGET_PPC64)
4626 static void gen_rfid(DisasContext *ctx)
4627 {
4628 #if defined(CONFIG_USER_ONLY)
4629     GEN_PRIV(ctx);
4630 #else
4631     /* Restore CPU state */
4632     CHK_SV(ctx);
4633     gen_icount_io_start(ctx);
4634     gen_update_cfar(ctx, ctx->cia);
4635     gen_helper_rfid(cpu_env);
4636     ctx->base.is_jmp = DISAS_EXIT;
4637 #endif
4638 }
4639 
4640 #if !defined(CONFIG_USER_ONLY)
4641 static void gen_rfscv(DisasContext *ctx)
4642 {
4643 #if defined(CONFIG_USER_ONLY)
4644     GEN_PRIV(ctx);
4645 #else
4646     /* Restore CPU state */
4647     CHK_SV(ctx);
4648     gen_icount_io_start(ctx);
4649     gen_update_cfar(ctx, ctx->cia);
4650     gen_helper_rfscv(cpu_env);
4651     ctx->base.is_jmp = DISAS_EXIT;
4652 #endif
4653 }
4654 #endif
4655 
4656 static void gen_hrfid(DisasContext *ctx)
4657 {
4658 #if defined(CONFIG_USER_ONLY)
4659     GEN_PRIV(ctx);
4660 #else
4661     /* Restore CPU state */
4662     CHK_HV(ctx);
4663     gen_helper_hrfid(cpu_env);
4664     ctx->base.is_jmp = DISAS_EXIT;
4665 #endif
4666 }
4667 #endif
4668 
4669 /* sc */
4670 #if defined(CONFIG_USER_ONLY)
4671 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4672 #else
4673 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4674 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4675 #endif
4676 static void gen_sc(DisasContext *ctx)
4677 {
4678     uint32_t lev;
4679 
4680     lev = (ctx->opcode >> 5) & 0x7F;
4681     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4682 }
4683 
4684 #if defined(TARGET_PPC64)
4685 #if !defined(CONFIG_USER_ONLY)
4686 static void gen_scv(DisasContext *ctx)
4687 {
4688     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4689 
4690     /* Set the PC back to the faulting instruction. */
4691     gen_update_nip(ctx, ctx->cia);
4692     gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4693 
4694     ctx->base.is_jmp = DISAS_NORETURN;
4695 }
4696 #endif
4697 #endif
4698 
4699 /***                                Trap                                   ***/
4700 
4701 /* Check for unconditional traps (always or never) */
4702 static bool check_unconditional_trap(DisasContext *ctx)
4703 {
4704     /* Trap never */
4705     if (TO(ctx->opcode) == 0) {
4706         return true;
4707     }
4708     /* Trap always */
4709     if (TO(ctx->opcode) == 31) {
4710         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4711         return true;
4712     }
4713     return false;
4714 }
4715 
4716 /* tw */
4717 static void gen_tw(DisasContext *ctx)
4718 {
4719     TCGv_i32 t0;
4720 
4721     if (check_unconditional_trap(ctx)) {
4722         return;
4723     }
4724     t0 = tcg_const_i32(TO(ctx->opcode));
4725     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4726                   t0);
4727     tcg_temp_free_i32(t0);
4728 }
4729 
4730 /* twi */
4731 static void gen_twi(DisasContext *ctx)
4732 {
4733     TCGv t0;
4734     TCGv_i32 t1;
4735 
4736     if (check_unconditional_trap(ctx)) {
4737         return;
4738     }
4739     t0 = tcg_const_tl(SIMM(ctx->opcode));
4740     t1 = tcg_const_i32(TO(ctx->opcode));
4741     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4742     tcg_temp_free(t0);
4743     tcg_temp_free_i32(t1);
4744 }
4745 
4746 #if defined(TARGET_PPC64)
4747 /* td */
4748 static void gen_td(DisasContext *ctx)
4749 {
4750     TCGv_i32 t0;
4751 
4752     if (check_unconditional_trap(ctx)) {
4753         return;
4754     }
4755     t0 = tcg_const_i32(TO(ctx->opcode));
4756     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4757                   t0);
4758     tcg_temp_free_i32(t0);
4759 }
4760 
4761 /* tdi */
4762 static void gen_tdi(DisasContext *ctx)
4763 {
4764     TCGv t0;
4765     TCGv_i32 t1;
4766 
4767     if (check_unconditional_trap(ctx)) {
4768         return;
4769     }
4770     t0 = tcg_const_tl(SIMM(ctx->opcode));
4771     t1 = tcg_const_i32(TO(ctx->opcode));
4772     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4773     tcg_temp_free(t0);
4774     tcg_temp_free_i32(t1);
4775 }
4776 #endif
4777 
4778 /***                          Processor control                            ***/
4779 
4780 /* mcrxr */
4781 static void gen_mcrxr(DisasContext *ctx)
4782 {
4783     TCGv_i32 t0 = tcg_temp_new_i32();
4784     TCGv_i32 t1 = tcg_temp_new_i32();
4785     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4786 
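    /* CR[crfD] = SO || OV || CA || 0b0; the three XER bits are then cleared */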
4787     tcg_gen_trunc_tl_i32(t0, cpu_so);
4788     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4789     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4790     tcg_gen_shli_i32(t0, t0, 3);
4791     tcg_gen_shli_i32(t1, t1, 2);
4792     tcg_gen_shli_i32(dst, dst, 1);
4793     tcg_gen_or_i32(dst, dst, t0);
4794     tcg_gen_or_i32(dst, dst, t1);
4795     tcg_temp_free_i32(t0);
4796     tcg_temp_free_i32(t1);
4797 
4798     tcg_gen_movi_tl(cpu_so, 0);
4799     tcg_gen_movi_tl(cpu_ov, 0);
4800     tcg_gen_movi_tl(cpu_ca, 0);
4801 }
4802 
4803 #ifdef TARGET_PPC64
4804 /* mcrxrx */
4805 static void gen_mcrxrx(DisasContext *ctx)
4806 {
4807     TCGv t0 = tcg_temp_new();
4808     TCGv t1 = tcg_temp_new();
4809     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4810 
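    /* CR[crfD] = OV || OV32 || CA || CA32 */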
4811     /* copy OV and OV32 */
4812     tcg_gen_shli_tl(t0, cpu_ov, 1);
4813     tcg_gen_or_tl(t0, t0, cpu_ov32);
4814     tcg_gen_shli_tl(t0, t0, 2);
4815     /* copy CA and CA32 */
4816     tcg_gen_shli_tl(t1, cpu_ca, 1);
4817     tcg_gen_or_tl(t1, t1, cpu_ca32);
4818     tcg_gen_or_tl(t0, t0, t1);
4819     tcg_gen_trunc_tl_i32(dst, t0);
4820     tcg_temp_free(t0);
4821     tcg_temp_free(t1);
4822 }
4823 #endif
4824 
4825 /* mfcr mfocrf */
4826 static void gen_mfcr(DisasContext *ctx)
4827 {
4828     uint32_t crm, crn;
4829 
4830     if (likely(ctx->opcode & 0x00100000)) {
4831         crm = CRM(ctx->opcode);
4832         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4833             crn = ctz32(crm);
4834             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4835             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4836                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4837         }
4838     } else {
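        /* mfcr: concatenate all eight CR fields into rD */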
4839         TCGv_i32 t0 = tcg_temp_new_i32();
4840         tcg_gen_mov_i32(t0, cpu_crf[0]);
4841         tcg_gen_shli_i32(t0, t0, 4);
4842         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4843         tcg_gen_shli_i32(t0, t0, 4);
4844         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4845         tcg_gen_shli_i32(t0, t0, 4);
4846         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4847         tcg_gen_shli_i32(t0, t0, 4);
4848         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4849         tcg_gen_shli_i32(t0, t0, 4);
4850         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4851         tcg_gen_shli_i32(t0, t0, 4);
4852         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4853         tcg_gen_shli_i32(t0, t0, 4);
4854         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4855         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4856         tcg_temp_free_i32(t0);
4857     }
4858 }
4859 
4860 /* mfmsr */
4861 static void gen_mfmsr(DisasContext *ctx)
4862 {
4863     CHK_SV(ctx);
4864     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4865 }
4866 
4867 /* mfspr */
4868 static inline void gen_op_mfspr(DisasContext *ctx)
4869 {
4870     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4871     uint32_t sprn = SPR(ctx->opcode);
4872 
4873 #if defined(CONFIG_USER_ONLY)
4874     read_cb = ctx->spr_cb[sprn].uea_read;
4875 #else
4876     if (ctx->pr) {
4877         read_cb = ctx->spr_cb[sprn].uea_read;
4878     } else if (ctx->hv) {
4879         read_cb = ctx->spr_cb[sprn].hea_read;
4880     } else {
4881         read_cb = ctx->spr_cb[sprn].oea_read;
4882     }
4883 #endif
4884     if (likely(read_cb != NULL)) {
4885         if (likely(read_cb != SPR_NOACCESS)) {
4886             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4887         } else {
4888             /* Privilege exception */
4889             /*
4890              * This is a hack to avoid warnings when running Linux:
4891              * this OS breaks the PowerPC virtualisation model,
4892              * allowing userland applications to read the PVR
4893              */
4894             if (sprn != SPR_PVR) {
4895                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4896                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4897                               ctx->cia);
4898             }
4899             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4900         }
4901     } else {
4902         /* ISA 2.07 defines these as no-ops */
4903         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4904             (sprn >= 808 && sprn <= 811)) {
4905             /* This is a nop */
4906             return;
4907         }
4908         /* Not defined */
4909         qemu_log_mask(LOG_GUEST_ERROR,
4910                       "Trying to read invalid spr %d (0x%03x) at "
4911                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4912 
4913         /*
4914          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
4915          * generate a priv exception, an hv emu exception or a no-op
4916          */
4917         if (sprn & 0x10) {
4918             if (ctx->pr) {
4919                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4920             }
4921         } else {
4922             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4923                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4924             }
4925         }
4926     }
4927 }
4928 
4929 static void gen_mfspr(DisasContext *ctx)
4930 {
4931     gen_op_mfspr(ctx);
4932 }
4933 
4934 /* mftb */
4935 static void gen_mftb(DisasContext *ctx)
4936 {
4937     gen_op_mfspr(ctx);
4938 }
4939 
4940 /* mtcrf mtocrf*/
4941 static void gen_mtcrf(DisasContext *ctx)
4942 {
4943     uint32_t crm, crn;
4944 
4945     crm = CRM(ctx->opcode);
4946     if (likely((ctx->opcode & 0x00100000))) {
4947         if (crm && ((crm & (crm - 1)) == 0)) {
4948             TCGv_i32 temp = tcg_temp_new_i32();
4949             crn = ctz32(crm);
4950             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4951             tcg_gen_shri_i32(temp, temp, crn * 4);
4952             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4953             tcg_temp_free_i32(temp);
4954         }
4955     } else {
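        /* mtcrf: update every CR field whose CRM bit is set */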
4956         TCGv_i32 temp = tcg_temp_new_i32();
4957         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4958         for (crn = 0 ; crn < 8 ; crn++) {
4959             if (crm & (1 << crn)) {
4960                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4961                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4962             }
4963         }
4964         tcg_temp_free_i32(temp);
4965     }
4966 }
4967 
4968 /* mtmsr */
4969 #if defined(TARGET_PPC64)
4970 static void gen_mtmsrd(DisasContext *ctx)
4971 {
4972     if (unlikely(!is_book3s_arch2x(ctx))) {
4973         gen_invalid(ctx);
4974         return;
4975     }
4976 
4977     CHK_SV(ctx);
4978 
4979 #if !defined(CONFIG_USER_ONLY)
4980     TCGv t0, t1;
4981     target_ulong mask;
4982 
4983     t0 = tcg_temp_new();
4984     t1 = tcg_temp_new();
4985 
4986     gen_icount_io_start(ctx);
4987 
4988     if (ctx->opcode & 0x00010000) {
4989         /* L=1 form only updates EE and RI */
4990         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4991     } else {
4992         /* mtmsrd does not alter HV, S, ME, or LE */
4993         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4994                  (1ULL << MSR_HV));
4995         /*
4996          * XXX: we need to update nip before the store; if we enter
4997          *      power saving mode, we will exit the loop directly from
4998          *      ppc_store_msr
4999          */
5000         gen_update_nip(ctx, ctx->base.pc_next);
5001     }
5002 
5003     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
5004     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
5005     tcg_gen_or_tl(t0, t0, t1);
5006 
5007     gen_helper_store_msr(cpu_env, t0);
5008 
5009     /* Must stop the translation as the machine state may have changed */
5010     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5011 
5012     tcg_temp_free(t0);
5013     tcg_temp_free(t1);
5014 #endif /* !defined(CONFIG_USER_ONLY) */
5015 }
5016 #endif /* defined(TARGET_PPC64) */
5017 
5018 static void gen_mtmsr(DisasContext *ctx)
5019 {
5020     CHK_SV(ctx);
5021 
5022 #if !defined(CONFIG_USER_ONLY)
5023     TCGv t0, t1;
5024     target_ulong mask = 0xFFFFFFFF;
5025 
5026     t0 = tcg_temp_new();
5027     t1 = tcg_temp_new();
5028 
5029     gen_icount_io_start(ctx);
5030     if (ctx->opcode & 0x00010000) {
5031         /* L=1 form only updates EE and RI */
5032         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
5033     } else {
5034         /* mtmsr does not alter S, ME, or LE */
5035         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
5036 
5037         /*
5038          * XXX: we need to update nip before the store; if we enter
5039          *      power saving mode, we will exit the loop directly from
5040          *      ppc_store_msr
5041          */
5042         gen_update_nip(ctx, ctx->base.pc_next);
5043     }
5044 
5045     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
5046     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
5047     tcg_gen_or_tl(t0, t0, t1);
5048 
5049     gen_helper_store_msr(cpu_env, t0);
5050 
5051     /* Must stop the translation as the machine state may have changed */
5052     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5053 
5054     tcg_temp_free(t0);
5055     tcg_temp_free(t1);
5056 #endif
5057 }
5058 
5059 /* mtspr */
5060 static void gen_mtspr(DisasContext *ctx)
5061 {
5062     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
5063     uint32_t sprn = SPR(ctx->opcode);
5064 
5065 #if defined(CONFIG_USER_ONLY)
5066     write_cb = ctx->spr_cb[sprn].uea_write;
5067 #else
5068     if (ctx->pr) {
5069         write_cb = ctx->spr_cb[sprn].uea_write;
5070     } else if (ctx->hv) {
5071         write_cb = ctx->spr_cb[sprn].hea_write;
5072     } else {
5073         write_cb = ctx->spr_cb[sprn].oea_write;
5074     }
5075 #endif
5076     if (likely(write_cb != NULL)) {
5077         if (likely(write_cb != SPR_NOACCESS)) {
5078             (*write_cb)(ctx, sprn, rS(ctx->opcode));
5079         } else {
5080             /* Privilege exception */
5081             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
5082                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5083                           ctx->cia);
5084             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5085         }
5086     } else {
5087         /* ISA 2.07 defines these as no-ops */
5088         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5089             (sprn >= 808 && sprn <= 811)) {
5090             /* This is a nop */
5091             return;
5092         }
5093 
5094         /* Not defined */
5095         qemu_log_mask(LOG_GUEST_ERROR,
5096                       "Trying to write invalid spr %d (0x%03x) at "
5097                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5098 
5100         /*
5101          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
5102          * generate a priv exception, an hv emu exception or a no-op
5103          */
5104         if (sprn & 0x10) {
5105             if (ctx->pr) {
5106                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5107             }
5108         } else {
5109             if (ctx->pr || sprn == 0) {
5110                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5111             }
5112         }
5113     }
5114 }
5115 
5116 #if defined(TARGET_PPC64)
5117 /* setb */
5118 static void gen_setb(DisasContext *ctx)
5119 {
5120     TCGv_i32 t0 = tcg_temp_new_i32();
5121     TCGv_i32 t8 = tcg_constant_i32(8);
5122     TCGv_i32 tm1 = tcg_constant_i32(-1);
5123     int crf = crfS(ctx->opcode);
5124 
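    /* rD = -1 if CR[crf].LT is set, 1 if only CR[crf].GT is set, 0 otherwise */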
5125     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5126     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5127     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5128 
5129     tcg_temp_free_i32(t0);
5130 }
5131 #endif
5132 
5133 /***                         Cache management                              ***/
5134 
5135 /* dcbf */
5136 static void gen_dcbf(DisasContext *ctx)
5137 {
5138     /* XXX: specification says this is treated as a load by the MMU */
5139     TCGv t0;
5140     gen_set_access_type(ctx, ACCESS_CACHE);
5141     t0 = tcg_temp_new();
5142     gen_addr_reg_index(ctx, t0);
5143     gen_qemu_ld8u(ctx, t0, t0);
5144     tcg_temp_free(t0);
5145 }
5146 
5147 /* dcbfep (external PID dcbf) */
5148 static void gen_dcbfep(DisasContext *ctx)
5149 {
5150     /* XXX: specification says this is treated as a load by the MMU */
5151     TCGv t0;
5152     CHK_SV(ctx);
5153     gen_set_access_type(ctx, ACCESS_CACHE);
5154     t0 = tcg_temp_new();
5155     gen_addr_reg_index(ctx, t0);
5156     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5157     tcg_temp_free(t0);
5158 }
5159 
5160 /* dcbi (Supervisor only) */
5161 static void gen_dcbi(DisasContext *ctx)
5162 {
5163 #if defined(CONFIG_USER_ONLY)
5164     GEN_PRIV(ctx);
5165 #else
5166     TCGv EA, val;
5167 
5168     CHK_SV(ctx);
5169     EA = tcg_temp_new();
5170     gen_set_access_type(ctx, ACCESS_CACHE);
5171     gen_addr_reg_index(ctx, EA);
5172     val = tcg_temp_new();
5173     /* XXX: specification says this should be treated as a store by the MMU */
5174     gen_qemu_ld8u(ctx, val, EA);
5175     gen_qemu_st8(ctx, val, EA);
5176     tcg_temp_free(val);
5177     tcg_temp_free(EA);
5178 #endif /* defined(CONFIG_USER_ONLY) */
5179 }
5180 
5181 /* dcbst */
5182 static void gen_dcbst(DisasContext *ctx)
5183 {
5184     /* XXX: specification says this is treated as a load by the MMU */
5185     TCGv t0;
5186     gen_set_access_type(ctx, ACCESS_CACHE);
5187     t0 = tcg_temp_new();
5188     gen_addr_reg_index(ctx, t0);
5189     gen_qemu_ld8u(ctx, t0, t0);
5190     tcg_temp_free(t0);
5191 }
5192 
5193 /* dcbstep (external PID version of dcbst) */
5194 static void gen_dcbstep(DisasContext *ctx)
5195 {
5196     /* XXX: specification says this is treated as a load by the MMU */
5197     TCGv t0;
5198     gen_set_access_type(ctx, ACCESS_CACHE);
5199     t0 = tcg_temp_new();
5200     gen_addr_reg_index(ctx, t0);
5201     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5202     tcg_temp_free(t0);
5203 }
5204 
5205 /* dcbt */
5206 static void gen_dcbt(DisasContext *ctx)
5207 {
5208     /*
5209      * interpreted as no-op
5210      * XXX: specification says this is treated as a load by the MMU but
5211      *      does not generate any exception
5212      */
5213 }
5214 
5215 /* dcbtep */
5216 static void gen_dcbtep(DisasContext *ctx)
5217 {
5218     /*
5219      * interpreted as no-op
5220      * XXX: specification says this is treated as a load by the MMU but
5221      *      does not generate any exception
5222      */
5223 }
5224 
5225 /* dcbtst */
5226 static void gen_dcbtst(DisasContext *ctx)
5227 {
5228     /*
5229      * interpreted as no-op
5230      * XXX: specification says this is treated as a load by the MMU but
5231      *      does not generate any exception
5232      */
5233 }
5234 
5235 /* dcbtstep */
5236 static void gen_dcbtstep(DisasContext *ctx)
5237 {
5238     /*
5239      * interpreted as no-op
5240      * XXX: specification says this is treated as a load by the MMU but
5241      *      does not generate any exception
5242      */
5243 }
5244 
5245 /* dcbtls */
5246 static void gen_dcbtls(DisasContext *ctx)
5247 {
5248     /* Always fails locking the cache */
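    /* Report the failed lock attempt via the L1CSR0[CUL] status bit */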
5249     TCGv t0 = tcg_temp_new();
5250     gen_load_spr(t0, SPR_Exxx_L1CSR0);
5251     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5252     gen_store_spr(SPR_Exxx_L1CSR0, t0);
5253     tcg_temp_free(t0);
5254 }
5255 
5256 /* dcblc */
5257 static void gen_dcblc(DisasContext *ctx)
5258 {
5259     /*
5260      * interpreted as no-op
5261      */
5262 }
5263 
5264 /* dcbz */
5265 static void gen_dcbz(DisasContext *ctx)
5266 {
5267     TCGv tcgv_addr;
5268     TCGv_i32 tcgv_op;
5269 
5270     gen_set_access_type(ctx, ACCESS_CACHE);
5271     tcgv_addr = tcg_temp_new();
5272     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5273     gen_addr_reg_index(ctx, tcgv_addr);
5274     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5275     tcg_temp_free(tcgv_addr);
5276     tcg_temp_free_i32(tcgv_op);
5277 }
5278 
5279 /* dcbzep */
5280 static void gen_dcbzep(DisasContext *ctx)
5281 {
5282     TCGv tcgv_addr;
5283     TCGv_i32 tcgv_op;
5284 
5285     gen_set_access_type(ctx, ACCESS_CACHE);
5286     tcgv_addr = tcg_temp_new();
5287     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5288     gen_addr_reg_index(ctx, tcgv_addr);
5289     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5290     tcg_temp_free(tcgv_addr);
5291     tcg_temp_free_i32(tcgv_op);
5292 }
5293 
5294 /* dst / dstt */
5295 static void gen_dst(DisasContext *ctx)
5296 {
5297     if (rA(ctx->opcode) == 0) {
5298         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5299     } else {
5300         /* interpreted as no-op */
5301     }
5302 }
5303 
5304 /* dstst / dststt */
5305 static void gen_dstst(DisasContext *ctx)
5306 {
5307     if (rA(ctx->opcode) == 0) {
5308         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5309     } else {
5310         /* interpreted as no-op */
5311     }
5313 }
5314 
5315 /* dss / dssall */
5316 static void gen_dss(DisasContext *ctx)
5317 {
5318     /* interpreted as no-op */
5319 }
5320 
5321 /* icbi */
5322 static void gen_icbi(DisasContext *ctx)
5323 {
5324     TCGv t0;
5325     gen_set_access_type(ctx, ACCESS_CACHE);
5326     t0 = tcg_temp_new();
5327     gen_addr_reg_index(ctx, t0);
5328     gen_helper_icbi(cpu_env, t0);
5329     tcg_temp_free(t0);
5330 }
5331 
5332 /* icbiep */
5333 static void gen_icbiep(DisasContext *ctx)
5334 {
5335     TCGv t0;
5336     gen_set_access_type(ctx, ACCESS_CACHE);
5337     t0 = tcg_temp_new();
5338     gen_addr_reg_index(ctx, t0);
5339     gen_helper_icbiep(cpu_env, t0);
5340     tcg_temp_free(t0);
5341 }
5342 
5343 /* Optional: */
5344 /* dcba */
5345 static void gen_dcba(DisasContext *ctx)
5346 {
5347     /*
5348      * interpreted as no-op
5349      * XXX: specification says this is treated as a store by the MMU
5350      *      but does not generate any exception
5351      */
5352 }
5353 
5354 /***                    Segment register manipulation                      ***/
5355 /* Supervisor only: */
5356 
5357 /* mfsr */
5358 static void gen_mfsr(DisasContext *ctx)
5359 {
5360 #if defined(CONFIG_USER_ONLY)
5361     GEN_PRIV(ctx);
5362 #else
5363     TCGv t0;
5364 
5365     CHK_SV(ctx);
5366     t0 = tcg_const_tl(SR(ctx->opcode));
5367     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5368     tcg_temp_free(t0);
5369 #endif /* defined(CONFIG_USER_ONLY) */
5370 }
5371 
5372 /* mfsrin */
5373 static void gen_mfsrin(DisasContext *ctx)
5374 {
5375 #if defined(CONFIG_USER_ONLY)
5376     GEN_PRIV(ctx);
5377 #else
5378     TCGv t0;
5379 
5380     CHK_SV(ctx);
5381     t0 = tcg_temp_new();
5382     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5383     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5384     tcg_temp_free(t0);
5385 #endif /* defined(CONFIG_USER_ONLY) */
5386 }
5387 
5388 /* mtsr */
5389 static void gen_mtsr(DisasContext *ctx)
5390 {
5391 #if defined(CONFIG_USER_ONLY)
5392     GEN_PRIV(ctx);
5393 #else
5394     TCGv t0;
5395 
5396     CHK_SV(ctx);
5397     t0 = tcg_const_tl(SR(ctx->opcode));
5398     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5399     tcg_temp_free(t0);
5400 #endif /* defined(CONFIG_USER_ONLY) */
5401 }
5402 
5403 /* mtsrin */
5404 static void gen_mtsrin(DisasContext *ctx)
5405 {
5406 #if defined(CONFIG_USER_ONLY)
5407     GEN_PRIV(ctx);
5408 #else
5409     TCGv t0;
5410     CHK_SV(ctx);
5411 
5412     t0 = tcg_temp_new();
5413     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5414     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5415     tcg_temp_free(t0);
5416 #endif /* defined(CONFIG_USER_ONLY) */
5417 }
5418 
5419 #if defined(TARGET_PPC64)
5420 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5421 
5422 /* mfsr */
5423 static void gen_mfsr_64b(DisasContext *ctx)
5424 {
5425 #if defined(CONFIG_USER_ONLY)
5426     GEN_PRIV(ctx);
5427 #else
5428     TCGv t0;
5429 
5430     CHK_SV(ctx);
5431     t0 = tcg_const_tl(SR(ctx->opcode));
5432     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5433     tcg_temp_free(t0);
5434 #endif /* defined(CONFIG_USER_ONLY) */
5435 }
5436 
5437 /* mfsrin */
5438 static void gen_mfsrin_64b(DisasContext *ctx)
5439 {
5440 #if defined(CONFIG_USER_ONLY)
5441     GEN_PRIV(ctx);
5442 #else
5443     TCGv t0;
5444 
5445     CHK_SV(ctx);
5446     t0 = tcg_temp_new();
5447     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5448     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5449     tcg_temp_free(t0);
5450 #endif /* defined(CONFIG_USER_ONLY) */
5451 }
5452 
5453 /* mtsr */
5454 static void gen_mtsr_64b(DisasContext *ctx)
5455 {
5456 #if defined(CONFIG_USER_ONLY)
5457     GEN_PRIV(ctx);
5458 #else
5459     TCGv t0;
5460 
5461     CHK_SV(ctx);
5462     t0 = tcg_const_tl(SR(ctx->opcode));
5463     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5464     tcg_temp_free(t0);
5465 #endif /* defined(CONFIG_USER_ONLY) */
5466 }
5467 
5468 /* mtsrin */
5469 static void gen_mtsrin_64b(DisasContext *ctx)
5470 {
5471 #if defined(CONFIG_USER_ONLY)
5472     GEN_PRIV(ctx);
5473 #else
5474     TCGv t0;
5475 
5476     CHK_SV(ctx);
5477     t0 = tcg_temp_new();
5478     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5479     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5480     tcg_temp_free(t0);
5481 #endif /* defined(CONFIG_USER_ONLY) */
5482 }
5483 
5484 #endif /* defined(TARGET_PPC64) */
5485 
5486 /***                      Lookaside buffer management                      ***/
5487 /* Optional & supervisor only: */
5488 
5489 /* tlbia */
5490 static void gen_tlbia(DisasContext *ctx)
5491 {
5492 #if defined(CONFIG_USER_ONLY)
5493     GEN_PRIV(ctx);
5494 #else
5495     CHK_HV(ctx);
5496 
5497     gen_helper_tlbia(cpu_env);
5498 #endif  /* defined(CONFIG_USER_ONLY) */
5499 }
5500 
5501 /* tlbsync */
5502 static void gen_tlbsync(DisasContext *ctx)
5503 {
5504 #if defined(CONFIG_USER_ONLY)
5505     GEN_PRIV(ctx);
5506 #else
5507 
5508     if (ctx->gtse) {
5509         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
5510     } else {
5511         CHK_HV(ctx); /* Else hypervisor privileged */
5512     }
5513 
5514     /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
5515     if (ctx->insns_flags & PPC_BOOKE) {
5516         gen_check_tlb_flush(ctx, true);
5517     }
5518 #endif /* defined(CONFIG_USER_ONLY) */
5519 }
5520 
5521 /***                              External control                         ***/
5522 /* Optional: */
5523 
5524 /* eciwx */
5525 static void gen_eciwx(DisasContext *ctx)
5526 {
5527     TCGv t0;
5528     /* Should check EAR[E] ! */
5529     gen_set_access_type(ctx, ACCESS_EXT);
5530     t0 = tcg_temp_new();
5531     gen_addr_reg_index(ctx, t0);
5532     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5533                        DEF_MEMOP(MO_UL | MO_ALIGN));
5534     tcg_temp_free(t0);
5535 }
5536 
5537 /* ecowx */
5538 static void gen_ecowx(DisasContext *ctx)
5539 {
5540     TCGv t0;
5541     /* Should check EAR[E] ! */
5542     gen_set_access_type(ctx, ACCESS_EXT);
5543     t0 = tcg_temp_new();
5544     gen_addr_reg_index(ctx, t0);
5545     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5546                        DEF_MEMOP(MO_UL | MO_ALIGN));
5547     tcg_temp_free(t0);
5548 }
5549 
5550 /* 602 - 603 - G2 TLB management */
5551 
5552 /* tlbld */
5553 static void gen_tlbld_6xx(DisasContext *ctx)
5554 {
5555 #if defined(CONFIG_USER_ONLY)
5556     GEN_PRIV(ctx);
5557 #else
5558     CHK_SV(ctx);
5559     gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5560 #endif /* defined(CONFIG_USER_ONLY) */
5561 }
5562 
5563 /* tlbli */
5564 static void gen_tlbli_6xx(DisasContext *ctx)
5565 {
5566 #if defined(CONFIG_USER_ONLY)
5567     GEN_PRIV(ctx);
5568 #else
5569     CHK_SV(ctx);
5570     gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5571 #endif /* defined(CONFIG_USER_ONLY) */
5572 }
5573 
5574 /* BookE specific instructions */
5575 
5576 /* XXX: not implemented on 440 ? */
5577 static void gen_mfapidi(DisasContext *ctx)
5578 {
5579     /* XXX: TODO */
5580     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5581 }
5582 
5583 /* XXX: not implemented on 440 ? */
5584 static void gen_tlbiva(DisasContext *ctx)
5585 {
5586 #if defined(CONFIG_USER_ONLY)
5587     GEN_PRIV(ctx);
5588 #else
5589     TCGv t0;
5590 
5591     CHK_SV(ctx);
5592     t0 = tcg_temp_new();
5593     gen_addr_reg_index(ctx, t0);
5594     gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5595     tcg_temp_free(t0);
5596 #endif /* defined(CONFIG_USER_ONLY) */
5597 }
5598 
5599 /* All 405 MAC instructions are translated here */
5600 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5601                                         int ra, int rb, int rt, int Rc)
5602 {
5603     TCGv t0, t1;
5604 
5605     t0 = tcg_temp_new();
5606     t1 = tcg_temp_new();
5607 
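    /* Extract the selected 16-bit halves of rA and rB and sign/zero extend them */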
5608     switch (opc3 & 0x0D) {
5609     case 0x05:
5610         /* macchw    - macchw.    - macchwo   - macchwo.   */
5611         /* macchws   - macchws.   - macchwso  - macchwso.  */
5612         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5613         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5614         /* mulchw - mulchw. */
5615         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5616         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5617         tcg_gen_ext16s_tl(t1, t1);
5618         break;
5619     case 0x04:
5620         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5621         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5622         /* mulchwu - mulchwu. */
5623         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5624         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5625         tcg_gen_ext16u_tl(t1, t1);
5626         break;
5627     case 0x01:
5628         /* machhw    - machhw.    - machhwo   - machhwo.   */
5629         /* machhws   - machhws.   - machhwso  - machhwso.  */
5630         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5631         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5632         /* mulhhw - mulhhw. */
5633         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5634         tcg_gen_ext16s_tl(t0, t0);
5635         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5636         tcg_gen_ext16s_tl(t1, t1);
5637         break;
5638     case 0x00:
5639         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5640         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5641         /* mulhhwu - mulhhwu. */
5642         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5643         tcg_gen_ext16u_tl(t0, t0);
5644         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5645         tcg_gen_ext16u_tl(t1, t1);
5646         break;
5647     case 0x0D:
5648         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5649         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5650         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5651         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5652         /* mullhw - mullhw. */
5653         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5654         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5655         break;
5656     case 0x0C:
5657         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5658         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5659         /* mullhwu - mullhwu. */
5660         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5661         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5662         break;
5663     }
5664     if (opc2 & 0x04) {
5665         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5666         tcg_gen_mul_tl(t1, t0, t1);
5667         if (opc2 & 0x02) {
5668             /* nmultiply-and-accumulate (0x0E) */
5669             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5670         } else {
5671             /* multiply-and-accumulate (0x0C) */
5672             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5673         }
5674 
5675         if (opc3 & 0x12) {
5676             /* Check overflow and/or saturate */
5677             TCGLabel *l1 = gen_new_label();
5678 
5679             if (opc3 & 0x10) {
5680                 /* Start with XER OV disabled, the most likely case */
5681                 tcg_gen_movi_tl(cpu_ov, 0);
5682             }
5683             if (opc3 & 0x01) {
5684                 /* Signed */
5685                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5686                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5687                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5688                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5689                 if (opc3 & 0x02) {
5690                     /* Saturate */
5691                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5692                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5693                 }
5694             } else {
5695                 /* Unsigned */
5696                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5697                 if (opc3 & 0x02) {
5698                     /* Saturate */
5699                     tcg_gen_movi_tl(t0, UINT32_MAX);
5700                 }
5701             }
5702             if (opc3 & 0x10) {
5703                 /* Overflow detected: set XER[OV] and XER[SO] */
5704                 tcg_gen_movi_tl(cpu_ov, 1);
5705                 tcg_gen_movi_tl(cpu_so, 1);
5706             }
5707             gen_set_label(l1);
5708             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5709         }
5710     } else {
5711         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5712     }
5713     tcg_temp_free(t0);
5714     tcg_temp_free(t1);
5715     if (unlikely(Rc != 0)) {
5716         /* Update Rc0 */
5717         gen_set_Rc0(ctx, cpu_gpr[rt]);
5718     }
5719 }
5720 
5721 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
5722 static void glue(gen_, name)(DisasContext *ctx)                               \
5723 {                                                                             \
5724     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
5725                          rD(ctx->opcode), Rc(ctx->opcode));                   \
5726 }
5727 
5728 /* macchw    - macchw.    */
5729 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5730 /* macchwo   - macchwo.   */
5731 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5732 /* macchws   - macchws.   */
5733 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5734 /* macchwso  - macchwso.  */
5735 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5736 /* macchwsu  - macchwsu.  */
5737 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5738 /* macchwsuo - macchwsuo. */
5739 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5740 /* macchwu   - macchwu.   */
5741 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5742 /* macchwuo  - macchwuo.  */
5743 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5744 /* machhw    - machhw.    */
5745 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5746 /* machhwo   - machhwo.   */
5747 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5748 /* machhws   - machhws.   */
5749 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5750 /* machhwso  - machhwso.  */
5751 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5752 /* machhwsu  - machhwsu.  */
5753 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5754 /* machhwsuo - machhwsuo. */
5755 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5756 /* machhwu   - machhwu.   */
5757 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5758 /* machhwuo  - machhwuo.  */
5759 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5760 /* maclhw    - maclhw.    */
5761 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5762 /* maclhwo   - maclhwo.   */
5763 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5764 /* maclhws   - maclhws.   */
5765 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5766 /* maclhwso  - maclhwso.  */
5767 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5768 /* maclhwu   - maclhwu.   */
5769 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5770 /* maclhwuo  - maclhwuo.  */
5771 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5772 /* maclhwsu  - maclhwsu.  */
5773 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5774 /* maclhwsuo - maclhwsuo. */
5775 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5776 /* nmacchw   - nmacchw.   */
5777 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5778 /* nmacchwo  - nmacchwo.  */
5779 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5780 /* nmacchws  - nmacchws.  */
5781 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5782 /* nmacchwso - nmacchwso. */
5783 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5784 /* nmachhw   - nmachhw.   */
5785 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5786 /* nmachhwo  - nmachhwo.  */
5787 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5788 /* nmachhws  - nmachhws.  */
5789 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5790 /* nmachhwso - nmachhwso. */
5791 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5792 /* nmaclhw   - nmaclhw.   */
5793 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5794 /* nmaclhwo  - nmaclhwo.  */
5795 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5796 /* nmaclhws  - nmaclhws.  */
5797 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5798 /* nmaclhwso - nmaclhwso. */
5799 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5800 
5801 /* mulchw  - mulchw.  */
5802 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5803 /* mulchwu - mulchwu. */
5804 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5805 /* mulhhw  - mulhhw.  */
5806 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5807 /* mulhhwu - mulhhwu. */
5808 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5809 /* mullhw  - mullhw.  */
5810 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5811 /* mullhwu - mullhwu. */
5812 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5813 
5814 /* mfdcr */
5815 static void gen_mfdcr(DisasContext *ctx)
5816 {
5817 #if defined(CONFIG_USER_ONLY)
5818     GEN_PRIV(ctx);
5819 #else
5820     TCGv dcrn;
5821 
5822     CHK_SV(ctx);
5823     dcrn = tcg_const_tl(SPR(ctx->opcode));
5824     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5825     tcg_temp_free(dcrn);
5826 #endif /* defined(CONFIG_USER_ONLY) */
5827 }
5828 
5829 /* mtdcr */
5830 static void gen_mtdcr(DisasContext *ctx)
5831 {
5832 #if defined(CONFIG_USER_ONLY)
5833     GEN_PRIV(ctx);
5834 #else
5835     TCGv dcrn;
5836 
5837     CHK_SV(ctx);
5838     dcrn = tcg_const_tl(SPR(ctx->opcode));
5839     gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5840     tcg_temp_free(dcrn);
5841 #endif /* defined(CONFIG_USER_ONLY) */
5842 }
5843 
5844 /* mfdcrx */
5845 /* XXX: not implemented on 440 ? */
5846 static void gen_mfdcrx(DisasContext *ctx)
5847 {
5848 #if defined(CONFIG_USER_ONLY)
5849     GEN_PRIV(ctx);
5850 #else
5851     CHK_SV(ctx);
5852     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5853                         cpu_gpr[rA(ctx->opcode)]);
5854     /* Note: setting the Rc bit leaves Rc0 in an undefined state */
5855 #endif /* defined(CONFIG_USER_ONLY) */
5856 }
5857 
5858 /* mtdcrx */
5859 /* XXX: not implemented on 440 ? */
5860 static void gen_mtdcrx(DisasContext *ctx)
5861 {
5862 #if defined(CONFIG_USER_ONLY)
5863     GEN_PRIV(ctx);
5864 #else
5865     CHK_SV(ctx);
5866     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5867                          cpu_gpr[rS(ctx->opcode)]);
5868     /* Note: setting the Rc bit leaves Rc0 in an undefined state */
5869 #endif /* defined(CONFIG_USER_ONLY) */
5870 }
5871 
5872 /* dccci */
5873 static void gen_dccci(DisasContext *ctx)
5874 {
5875     CHK_SV(ctx);
5876     /* interpreted as no-op */
5877 }
5878 
5879 /* dcread */
5880 static void gen_dcread(DisasContext *ctx)
5881 {
5882 #if defined(CONFIG_USER_ONLY)
5883     GEN_PRIV(ctx);
5884 #else
5885     TCGv EA, val;
5886 
5887     CHK_SV(ctx);
5888     gen_set_access_type(ctx, ACCESS_CACHE);
5889     EA = tcg_temp_new();
5890     gen_addr_reg_index(ctx, EA);
5891     val = tcg_temp_new();
5892     gen_qemu_ld32u(ctx, val, EA);
5893     tcg_temp_free(val);
5894     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5895     tcg_temp_free(EA);
5896 #endif /* defined(CONFIG_USER_ONLY) */
5897 }
5898 
5899 /* icbt */
5900 static void gen_icbt_40x(DisasContext *ctx)
5901 {
5902     /*
5903      * interpreted as no-op
5904      * XXX: specification says this is treated as a load by the MMU but
5905      *      does not generate any exception
5906      */
5907 }
5908 
5909 /* iccci */
5910 static void gen_iccci(DisasContext *ctx)
5911 {
5912     CHK_SV(ctx);
5913     /* interpreted as no-op */
5914 }
5915 
5916 /* icread */
5917 static void gen_icread(DisasContext *ctx)
5918 {
5919     CHK_SV(ctx);
5920     /* interpreted as no-op */
5921 }
5922 
5923 /* rfci (supervisor only) */
5924 static void gen_rfci_40x(DisasContext *ctx)
5925 {
5926 #if defined(CONFIG_USER_ONLY)
5927     GEN_PRIV(ctx);
5928 #else
5929     CHK_SV(ctx);
5930     /* Restore CPU state */
5931     gen_helper_40x_rfci(cpu_env);
5932     ctx->base.is_jmp = DISAS_EXIT;
5933 #endif /* defined(CONFIG_USER_ONLY) */
5934 }
5935 
5936 static void gen_rfci(DisasContext *ctx)
5937 {
5938 #if defined(CONFIG_USER_ONLY)
5939     GEN_PRIV(ctx);
5940 #else
5941     CHK_SV(ctx);
5942     /* Restore CPU state */
5943     gen_helper_rfci(cpu_env);
5944     ctx->base.is_jmp = DISAS_EXIT;
5945 #endif /* defined(CONFIG_USER_ONLY) */
5946 }
5947 
5948 /* BookE specific */
5949 
5950 /* XXX: not implemented on 440 ? */
5951 static void gen_rfdi(DisasContext *ctx)
5952 {
5953 #if defined(CONFIG_USER_ONLY)
5954     GEN_PRIV(ctx);
5955 #else
5956     CHK_SV(ctx);
5957     /* Restore CPU state */
5958     gen_helper_rfdi(cpu_env);
5959     ctx->base.is_jmp = DISAS_EXIT;
5960 #endif /* defined(CONFIG_USER_ONLY) */
5961 }
5962 
5963 /* XXX: not implemented on 440 ? */
5964 static void gen_rfmci(DisasContext *ctx)
5965 {
5966 #if defined(CONFIG_USER_ONLY)
5967     GEN_PRIV(ctx);
5968 #else
5969     CHK_SV(ctx);
5970     /* Restore CPU state */
5971     gen_helper_rfmci(cpu_env);
5972     ctx->base.is_jmp = DISAS_EXIT;
5973 #endif /* defined(CONFIG_USER_ONLY) */
5974 }
5975 
5976 /* TLB management - PowerPC 405 implementation */
5977 
5978 /* tlbre */
5979 static void gen_tlbre_40x(DisasContext *ctx)
5980 {
5981 #if defined(CONFIG_USER_ONLY)
5982     GEN_PRIV(ctx);
5983 #else
5984     CHK_SV(ctx);
5985     switch (rB(ctx->opcode)) {
5986     case 0:
5987         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5988                                 cpu_gpr[rA(ctx->opcode)]);
5989         break;
5990     case 1:
5991         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5992                                 cpu_gpr[rA(ctx->opcode)]);
5993         break;
5994     default:
5995         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5996         break;
5997     }
5998 #endif /* defined(CONFIG_USER_ONLY) */
5999 }
6000 
6001 /* tlbsx - tlbsx. */
6002 static void gen_tlbsx_40x(DisasContext *ctx)
6003 {
6004 #if defined(CONFIG_USER_ONLY)
6005     GEN_PRIV(ctx);
6006 #else
6007     TCGv t0;
6008 
6009     CHK_SV(ctx);
6010     t0 = tcg_temp_new();
6011     gen_addr_reg_index(ctx, t0);
6012     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6013     tcg_temp_free(t0);
6014     if (Rc(ctx->opcode)) {
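        /* Rc=1: CR0[SO] = XER[SO]; CR0[EQ] is set when a match was found (rD != -1) */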
6015         TCGLabel *l1 = gen_new_label();
6016         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6017         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6018         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6019         gen_set_label(l1);
6020     }
6021 #endif /* defined(CONFIG_USER_ONLY) */
6022 }
6023 
6024 /* tlbwe */
6025 static void gen_tlbwe_40x(DisasContext *ctx)
6026 {
6027 #if defined(CONFIG_USER_ONLY)
6028     GEN_PRIV(ctx);
6029 #else
6030     CHK_SV(ctx);
6031 
6032     switch (rB(ctx->opcode)) {
6033     case 0:
6034         gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
6035                                 cpu_gpr[rS(ctx->opcode)]);
6036         break;
6037     case 1:
6038         gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
6039                                 cpu_gpr[rS(ctx->opcode)]);
6040         break;
6041     default:
6042         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6043         break;
6044     }
6045 #endif /* defined(CONFIG_USER_ONLY) */
6046 }
6047 
6048 /* TLB management - PowerPC 440 implementation */
6049 
6050 /* tlbre */
6051 static void gen_tlbre_440(DisasContext *ctx)
6052 {
6053 #if defined(CONFIG_USER_ONLY)
6054     GEN_PRIV(ctx);
6055 #else
6056     CHK_SV(ctx);
6057 
6058     switch (rB(ctx->opcode)) {
6059     case 0:
6060     case 1:
6061     case 2:
6062         {
6063             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6064             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6065                                  t0, cpu_gpr[rA(ctx->opcode)]);
6066             tcg_temp_free_i32(t0);
6067         }
6068         break;
6069     default:
6070         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6071         break;
6072     }
6073 #endif /* defined(CONFIG_USER_ONLY) */
6074 }
6075 
6076 /* tlbsx - tlbsx. */
6077 static void gen_tlbsx_440(DisasContext *ctx)
6078 {
6079 #if defined(CONFIG_USER_ONLY)
6080     GEN_PRIV(ctx);
6081 #else
6082     TCGv t0;
6083 
6084     CHK_SV(ctx);
6085     t0 = tcg_temp_new();
6086     gen_addr_reg_index(ctx, t0);
6087     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6088     tcg_temp_free(t0);
6089     if (Rc(ctx->opcode)) {
6090         TCGLabel *l1 = gen_new_label();
6091         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6092         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6093         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6094         gen_set_label(l1);
6095     }
6096 #endif /* defined(CONFIG_USER_ONLY) */
6097 }
6098 
6099 /* tlbwe */
6100 static void gen_tlbwe_440(DisasContext *ctx)
6101 {
6102 #if defined(CONFIG_USER_ONLY)
6103     GEN_PRIV(ctx);
6104 #else
6105     CHK_SV(ctx);
6106     switch (rB(ctx->opcode)) {
6107     case 0:
6108     case 1:
6109     case 2:
6110         {
6111             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6112             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6113                                  cpu_gpr[rS(ctx->opcode)]);
6114             tcg_temp_free_i32(t0);
6115         }
6116         break;
6117     default:
6118         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6119         break;
6120     }
6121 #endif /* defined(CONFIG_USER_ONLY) */
6122 }
6123 
6124 /* TLB management - PowerPC BookE 2.06 implementation */
6125 
6126 /* tlbre */
6127 static void gen_tlbre_booke206(DisasContext *ctx)
6128 {
6129 #if defined(CONFIG_USER_ONLY)
6130     GEN_PRIV(ctx);
6131 #else
6132     CHK_SV(ctx);
6133     gen_helper_booke206_tlbre(cpu_env);
6134 #endif /* defined(CONFIG_USER_ONLY) */
6135 }
6136 
6137 /* tlbsx - tlbsx. */
6138 static void gen_tlbsx_booke206(DisasContext *ctx)
6139 {
6140 #if defined(CONFIG_USER_ONLY)
6141     GEN_PRIV(ctx);
6142 #else
6143     TCGv t0;
6144 
6145     CHK_SV(ctx);
6146     if (rA(ctx->opcode)) {
6147         t0 = tcg_temp_new();
6148         tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6149     } else {
6150         t0 = tcg_const_tl(0);
6151     }
6152 
6153     tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6154     gen_helper_booke206_tlbsx(cpu_env, t0);
6155     tcg_temp_free(t0);
6156 #endif /* defined(CONFIG_USER_ONLY) */
6157 }
6158 
6159 /* tlbwe */
6160 static void gen_tlbwe_booke206(DisasContext *ctx)
6161 {
6162 #if defined(CONFIG_USER_ONLY)
6163     GEN_PRIV(ctx);
6164 #else
6165     CHK_SV(ctx);
6166     gen_helper_booke206_tlbwe(cpu_env);
6167 #endif /* defined(CONFIG_USER_ONLY) */
6168 }
6169 
6170 static void gen_tlbivax_booke206(DisasContext *ctx)
6171 {
6172 #if defined(CONFIG_USER_ONLY)
6173     GEN_PRIV(ctx);
6174 #else
6175     TCGv t0;
6176 
6177     CHK_SV(ctx);
6178     t0 = tcg_temp_new();
6179     gen_addr_reg_index(ctx, t0);
6180     gen_helper_booke206_tlbivax(cpu_env, t0);
6181     tcg_temp_free(t0);
6182 #endif /* defined(CONFIG_USER_ONLY) */
6183 }
6184 
6185 static void gen_tlbilx_booke206(DisasContext *ctx)
6186 {
6187 #if defined(CONFIG_USER_ONLY)
6188     GEN_PRIV(ctx);
6189 #else
6190     TCGv t0;
6191 
6192     CHK_SV(ctx);
6193     t0 = tcg_temp_new();
6194     gen_addr_reg_index(ctx, t0);
6195 
6196     switch ((ctx->opcode >> 21) & 0x3) {
6197     case 0:
6198         gen_helper_booke206_tlbilx0(cpu_env, t0);
6199         break;
6200     case 1:
6201         gen_helper_booke206_tlbilx1(cpu_env, t0);
6202         break;
6203     case 3:
6204         gen_helper_booke206_tlbilx3(cpu_env, t0);
6205         break;
6206     default:
6207         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6208         break;
6209     }
6210 
6211     tcg_temp_free(t0);
6212 #endif /* defined(CONFIG_USER_ONLY) */
6213 }
6214 
6215 /* wrtee */
6216 static void gen_wrtee(DisasContext *ctx)
6217 {
6218 #if defined(CONFIG_USER_ONLY)
6219     GEN_PRIV(ctx);
6220 #else
6221     TCGv t0;
6222 
6223     CHK_SV(ctx);
6224     t0 = tcg_temp_new();
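    /* Copy only the EE bit from rD into MSR; all other MSR bits are unchanged */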
6225     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6226     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6227     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6228     gen_ppc_maybe_interrupt(ctx);
6229     tcg_temp_free(t0);
6230     /*
6231      * Stop translation to have a chance to raise an exception if we
6232      * just set msr_ee to 1
6233      */
6234     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6235 #endif /* defined(CONFIG_USER_ONLY) */
6236 }
6237 
6238 /* wrteei */
6239 static void gen_wrteei(DisasContext *ctx)
6240 {
6241 #if defined(CONFIG_USER_ONLY)
6242     GEN_PRIV(ctx);
6243 #else
6244     CHK_SV(ctx);
6245     if (ctx->opcode & 0x00008000) {
6246         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6247         gen_ppc_maybe_interrupt(ctx);
6248         /* Stop translation to have a chance to raise an exception */
6249         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6250     } else {
6251         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6252     }
6253 #endif /* defined(CONFIG_USER_ONLY) */
6254 }
6255 
6256 /* PowerPC 440 specific instructions */
6257 
6258 /* dlmzb */
6259 static void gen_dlmzb(DisasContext *ctx)
6260 {
6261     TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6262     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6263                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6264     tcg_temp_free_i32(t0);
6265 }
6266 
6267 /* mbar replaces eieio on 440 */
6268 static void gen_mbar(DisasContext *ctx)
6269 {
6270     /* interpreted as no-op */
6271 }
6272 
6273 /* msync replaces sync on 440 */
6274 static void gen_msync_4xx(DisasContext *ctx)
6275 {
6276     /* Only e500 seems to treat reserved bits as invalid */
6277     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
6278         (ctx->opcode & 0x03FFF801)) {
6279         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6280     }
6281     /* otherwise interpreted as no-op */
6282 }
6283 
6284 /* icbt */
6285 static void gen_icbt_440(DisasContext *ctx)
6286 {
6287     /*
6288      * interpreted as no-op
6289      * XXX: specification says this is treated as a load by the MMU but
6290      *      does not generate any exception
6291      */
6292 }
6293 
6294 #if defined(TARGET_PPC64)
6295 static void gen_maddld(DisasContext *ctx)
6296 {
6297     TCGv_i64 t1 = tcg_temp_new_i64();
6298 
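    /* rD = (rA * rB + rC) modulo 2^64 */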
6299     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6300     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6301     tcg_temp_free_i64(t1);
6302 }
6303 
6304 /* maddhd maddhdu */
6305 static void gen_maddhd_maddhdu(DisasContext *ctx)
6306 {
6307     TCGv_i64 lo = tcg_temp_new_i64();
6308     TCGv_i64 hi = tcg_temp_new_i64();
6309     TCGv_i64 t1 = tcg_temp_new_i64();
6310 
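    /* The low opcode bit selects maddhdu (unsigned multiply); otherwise maddhd (signed) */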
6311     if (Rc(ctx->opcode)) {
6312         tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6313                           cpu_gpr[rB(ctx->opcode)]);
6314         tcg_gen_movi_i64(t1, 0);
6315     } else {
6316         tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6317                           cpu_gpr[rB(ctx->opcode)]);
6318         tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6319     }
6320     tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6321                      cpu_gpr[rC(ctx->opcode)], t1);
6322     tcg_temp_free_i64(lo);
6323     tcg_temp_free_i64(hi);
6324     tcg_temp_free_i64(t1);
6325 }
6326 #endif /* defined(TARGET_PPC64) */
6327 
6328 static void gen_tbegin(DisasContext *ctx)
6329 {
6330     if (unlikely(!ctx->tm_enabled)) {
6331         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6332         return;
6333     }
6334     gen_helper_tbegin(cpu_env);
6335 }
6336 
6337 #define GEN_TM_NOOP(name)                                      \
6338 static inline void gen_##name(DisasContext *ctx)               \
6339 {                                                              \
6340     if (unlikely(!ctx->tm_enabled)) {                          \
6341         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6342         return;                                                \
6343     }                                                          \
6344     /*                                                         \
6345      * Because tbegin always fails in QEMU, these user         \
6346      * space instructions all have a simple implementation:    \
6347      *                                                         \
6348      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
6349      *           = 0b0 || 0b00    || 0b0                       \
6350      */                                                        \
6351     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6352 }
6353 
6354 GEN_TM_NOOP(tend);
6355 GEN_TM_NOOP(tabort);
6356 GEN_TM_NOOP(tabortwc);
6357 GEN_TM_NOOP(tabortwci);
6358 GEN_TM_NOOP(tabortdc);
6359 GEN_TM_NOOP(tabortdci);
6360 GEN_TM_NOOP(tsr);
6361 
6362 static inline void gen_cp_abort(DisasContext *ctx)
6363 {
6364     /* Do Nothing */
6365 }
6366 
6367 #define GEN_CP_PASTE_NOOP(name)                           \
6368 static inline void gen_##name(DisasContext *ctx)          \
6369 {                                                         \
6370     /*                                                    \
6371      * Generate invalid exception until we have an        \
6372      * implementation of the copy paste facility          \
6373      */                                                   \
6374     gen_invalid(ctx);                                     \
6375 }
6376 
6377 GEN_CP_PASTE_NOOP(copy)
6378 GEN_CP_PASTE_NOOP(paste)
6379 
6380 static void gen_tcheck(DisasContext *ctx)
6381 {
6382     if (unlikely(!ctx->tm_enabled)) {
6383         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6384         return;
6385     }
6386     /*
6387      * Because tbegin always fails, the tcheck implementation is
6388      * simple:
6389      *
6390      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6391      *         = 0b1 || 0b00 || 0b0
6392      */
6393     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6394 }
6395 
6396 #if defined(CONFIG_USER_ONLY)
6397 #define GEN_TM_PRIV_NOOP(name)                                 \
6398 static inline void gen_##name(DisasContext *ctx)               \
6399 {                                                              \
6400     gen_priv_opc(ctx);                                         \
6401 }
6402 
6403 #else
6404 
6405 #define GEN_TM_PRIV_NOOP(name)                                 \
6406 static inline void gen_##name(DisasContext *ctx)               \
6407 {                                                              \
6408     CHK_SV(ctx);                                               \
6409     if (unlikely(!ctx->tm_enabled)) {                          \
6410         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6411         return;                                                \
6412     }                                                          \
6413     /*                                                         \
6414      * Because tbegin always fails, the implementation is      \
6415      * simple:                                                 \
6416      *                                                         \
6417      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
6418      *         = 0b0 || 0b00 || 0b0                            \
6419      */                                                        \
6420     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6421 }
6422 
6423 #endif
6424 
6425 GEN_TM_PRIV_NOOP(treclaim);
6426 GEN_TM_PRIV_NOOP(trechkpt);
6427 
6428 static inline void get_fpr(TCGv_i64 dst, int regno)
6429 {
6430     tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
6431 }
6432 
6433 static inline void set_fpr(int regno, TCGv_i64 src)
6434 {
6435     tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
6436     /*
6437      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
6438      * corresponding to the target FPR was undefined. However,
6439      * most (if not all) real hardware set the result to 0.
6440      * Starting at ISA v3.1, the result for doubleword 1 is now defined
6441      * to be 0.
6442      */
6443     tcg_gen_st_i64(tcg_constant_i64(0), cpu_env, vsr64_offset(regno, false));
6444 }
6445 
6446 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
6447 {
6448     tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
6449 }
6450 
6451 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
6452 {
6453     tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
6454 }
6455 
6456 /*
6457  * Helpers for decodetree used by !function for decoding arguments.
6458  */
6459 static int times_2(DisasContext *ctx, int x)
6460 {
6461     return x * 2;
6462 }
6463 
6464 static int times_4(DisasContext *ctx, int x)
6465 {
6466     return x * 4;
6467 }
6468 
6469 static int times_16(DisasContext *ctx, int x)
6470 {
6471     return x * 16;
6472 }
6473 
6474 static int64_t dw_compose_ea(DisasContext *ctx, int x)
6475 {
6476     return deposit64(0xfffffffffffffe00, 3, 6, x);
6477 }
6478 
6479 /*
6480  * Helpers for trans_* functions to check for specific insns flags.
6481  * Use token pasting to ensure that we use the proper flag with the
6482  * proper variable.
6483  */
6484 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
6485     do {                                                \
6486         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
6487             return false;                               \
6488         }                                               \
6489     } while (0)
6490 
6491 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
6492     do {                                                \
6493         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
6494             return false;                               \
6495         }                                               \
6496     } while (0)
6497 
6498 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
6499 #if TARGET_LONG_BITS == 32
6500 # define REQUIRE_64BIT(CTX)  return false
6501 #else
6502 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
6503 #endif
6504 
6505 #define REQUIRE_VECTOR(CTX)                             \
6506     do {                                                \
6507         if (unlikely(!(CTX)->altivec_enabled)) {        \
6508             gen_exception((CTX), POWERPC_EXCP_VPU);     \
6509             return true;                                \
6510         }                                               \
6511     } while (0)
6512 
6513 #define REQUIRE_VSX(CTX)                                \
6514     do {                                                \
6515         if (unlikely(!(CTX)->vsx_enabled)) {            \
6516             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
6517             return true;                                \
6518         }                                               \
6519     } while (0)
6520 
6521 #define REQUIRE_FPU(ctx)                                \
6522     do {                                                \
6523         if (unlikely(!(ctx)->fpu_enabled)) {            \
6524             gen_exception((ctx), POWERPC_EXCP_FPU);     \
6525             return true;                                \
6526         }                                               \
6527     } while (0)
6528 
6529 #if !defined(CONFIG_USER_ONLY)
6530 #define REQUIRE_SV(CTX)             \
6531     do {                            \
6532         if (unlikely((CTX)->pr)) {  \
6533             gen_priv_opc(CTX);      \
6534             return true;            \
6535         }                           \
6536     } while (0)
6537 
6538 #define REQUIRE_HV(CTX)                             \
6539     do {                                            \
6540         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
6541             gen_priv_opc(CTX);                      \
6542             return true;                            \
6543         }                                           \
6544     } while (0)
6545 #else
6546 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6547 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6548 #endif
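/*
 * Rough usage sketch (hypothetical instruction "FOO"): a typical trans_*
 * function layers these checks before emitting any code, e.g.
 *
 *     static bool trans_FOO(DisasContext *ctx, arg_FOO *a)
 *     {
 *         REQUIRE_INSNS_FLAGS2(ctx, ISA310);
 *         REQUIRE_FPU(ctx);
 *         ...emit TCG ops...
 *         return true;
 *     }
 *
 * Returning false means "not decoded", so translation falls back to the
 * legacy tables and eventually gen_invalid(); the facility/privilege
 * macros instead raise the proper exception themselves and return true.
 */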
6549 
6550 /*
6551  * Helpers for implementing sets of trans_* functions.
6552  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6553  */
6554 #define TRANS(NAME, FUNC, ...) \
6555     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6556     { return FUNC(ctx, a, __VA_ARGS__); }
6557 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
6558     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6559     {                                                          \
6560         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
6561         return FUNC(ctx, a, __VA_ARGS__);                      \
6562     }
6563 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6564     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6565     {                                                          \
6566         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6567         return FUNC(ctx, a, __VA_ARGS__);                      \
6568     }
6569 
6570 #define TRANS64(NAME, FUNC, ...) \
6571     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6572     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
6573 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6574     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6575     {                                                          \
6576         REQUIRE_64BIT(ctx);                                    \
6577         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6578         return FUNC(ctx, a, __VA_ARGS__);                      \
6579     }
6580 
6581 /* TODO: More TRANS* helpers for extra insn_flags checks. */
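/*
 * For example (hypothetical insn "FOO" and helper do_foo),
 * TRANS_FLAGS2(ISA310, FOO, do_foo, true) expands to roughly:
 *
 *     static bool trans_FOO(DisasContext *ctx, arg_FOO *a)
 *     {
 *         REQUIRE_INSNS_FLAGS2(ctx, ISA310);
 *         return do_foo(ctx, a, true);
 *     }
 *
 * so each decodetree pattern needs only a one-line macro invocation here.
 */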
6582 
6583 
6584 #include "decode-insn32.c.inc"
6585 #include "decode-insn64.c.inc"
6586 #include "power8-pmu-regs.c.inc"
6587 
6588 /*
6589  * Incorporate CIA into the constant when R=1.
6590  * Validate that when R=1, RA=0.
6591  */
6592 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6593 {
6594     d->rt = a->rt;
6595     d->ra = a->ra;
6596     d->si = a->si;
6597     if (a->r) {
6598         if (unlikely(a->ra != 0)) {
6599             gen_invalid(ctx);
6600             return false;
6601         }
6602         d->si += ctx->cia;
6603     }
6604     return true;
6605 }
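/*
 * With R=1 the prefixed D-form operand is CIA-relative (e.g. paddi/pla),
 * so the current instruction address is folded into d->si here; the ISA
 * requires RA=0 in that case, hence the gen_invalid() above.
 */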
6606 
6607 #include "translate/fixedpoint-impl.c.inc"
6608 
6609 #include "translate/fp-impl.c.inc"
6610 
6611 #include "translate/vmx-impl.c.inc"
6612 
6613 #include "translate/vsx-impl.c.inc"
6614 
6615 #include "translate/dfp-impl.c.inc"
6616 
6617 #include "translate/spe-impl.c.inc"
6618 
6619 #include "translate/branch-impl.c.inc"
6620 
6621 #include "translate/processor-ctrl-impl.c.inc"
6622 
6623 #include "translate/storage-ctrl-impl.c.inc"
6624 
6625 /* Handles lfdp */
6626 static void gen_dform39(DisasContext *ctx)
6627 {
6628     if ((ctx->opcode & 0x3) == 0) {
6629         if (ctx->insns_flags2 & PPC2_ISA205) {
6630             return gen_lfdp(ctx);
6631         }
6632     }
6633     return gen_invalid(ctx);
6634 }
6635 
6636 /* Handles stfdp */
6637 static void gen_dform3D(DisasContext *ctx)
6638 {
6639     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6640         /* stfdp */
6641         if (ctx->insns_flags2 & PPC2_ISA205) {
6642             return gen_stfdp(ctx);
6643         }
6644     }
6645     return gen_invalid(ctx);
6646 }
6647 
6648 #if defined(TARGET_PPC64)
6649 /* brd */
6650 static void gen_brd(DisasContext *ctx)
6651 {
6652     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6653 }
6654 
6655 /* brw */
6656 static void gen_brw(DisasContext *ctx)
6657 {
6658     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6659     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6661 }
6662 
6663 /* brh */
6664 static void gen_brh(DisasContext *ctx)
6665 {
6666     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6667     TCGv_i64 t1 = tcg_temp_new_i64();
6668     TCGv_i64 t2 = tcg_temp_new_i64();
6669 
6670     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6671     tcg_gen_and_i64(t2, t1, mask);
6672     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6673     tcg_gen_shli_i64(t1, t1, 8);
6674     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6675 
6676     tcg_temp_free_i64(t1);
6677     tcg_temp_free_i64(t2);
6678 }
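/*
 * brh above swaps the two bytes within each halfword: t2 holds the high
 * byte of every halfword shifted down, t1 the low byte shifted up, and
 * the final OR recombines them.
 */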
6679 #endif
6680 
6681 static opcode_t opcodes[] = {
6682 #if defined(TARGET_PPC64)
6683 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6684 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6685 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6686 #endif
6687 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6688 #if defined(TARGET_PPC64)
6689 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6690 #endif
6691 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6692 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6693 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6694 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6695 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6696 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6697 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6698 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6699 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6700 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6701 #if defined(TARGET_PPC64)
6702 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6703 #endif
6704 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6705 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6706 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6707 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6708 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6709 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6710 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6711 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6712 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6713 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6714 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6715 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6716 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6717 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6718 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6719 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6720 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6721 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6722 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6723 #if defined(TARGET_PPC64)
6724 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6725 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6726 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6727 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6728 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6729 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6730 #endif
6731 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6732 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6733 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6734 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6735 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6736 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6737 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6738 #if defined(TARGET_PPC64)
6739 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6740 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6741 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6742 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6743 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6744 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6745                PPC_NONE, PPC2_ISA300),
6746 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6747                PPC_NONE, PPC2_ISA300),
6748 #endif
6749 /* handles lfdp, lxsd, lxssp */
6750 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6751 /* handles stfdp, stxsd, stxssp */
6752 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6753 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6754 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6755 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6756 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6757 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6758 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6759 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6760 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6761 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6762 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6763 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6764 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6765 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6766 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6767 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6768 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6769 #if defined(TARGET_PPC64)
6770 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6771 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6772 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6773 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6774 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6775 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6776 #endif
6777 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6778 /* ISA v3.0 changed the extended opcode from 62 to 30 */
6779 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
6780 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
6781 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6782 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6783 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6784 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6785 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6786 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6787 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6788 #if defined(TARGET_PPC64)
6789 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6790 #if !defined(CONFIG_USER_ONLY)
6791 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6792 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6793 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6794 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6795 #endif
6796 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6797 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6798 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6799 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6800 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6801 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6802 #endif
6803 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6804 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6805 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
6806 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6807 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6808 #if defined(TARGET_PPC64)
6809 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6810 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6811 #endif
6812 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6813 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6814 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6815 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6816 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6817 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6818 #if defined(TARGET_PPC64)
6819 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6820 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6821 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6822 #endif
6823 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6824 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6825 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6826 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6827 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6828 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6829 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6830 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6831 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6832 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6833 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6834 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6835 GEN_HANDLER_E(dcblc, 0x1F, 0x06, 0x0c, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6836 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6837 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6838 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6839 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6840 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6841 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6842 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6843 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6844 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6845 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6846 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6847 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6848 #if defined(TARGET_PPC64)
6849 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6850 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6851              PPC_SEGMENT_64B),
6852 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6853 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6854              PPC_SEGMENT_64B),
6855 #endif
6856 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6857 /*
6858  * XXX These instructions will need to be handled differently for
6859  * different ISA versions.
6860  */
6861 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6862 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6863 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6864 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6865 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6866 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6867 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6868 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6869 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6870 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6871 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6872 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6873 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6874 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6875 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6876 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6877 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6878 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6879 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6880 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6881 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6882 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6883 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6884 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6885 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6886 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6887 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6888                PPC_NONE, PPC2_BOOKE206),
6889 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6890                PPC_NONE, PPC2_BOOKE206),
6891 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6892                PPC_NONE, PPC2_BOOKE206),
6893 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6894                PPC_NONE, PPC2_BOOKE206),
6895 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6896                PPC_NONE, PPC2_BOOKE206),
6897 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6898 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6899 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6900 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6901               PPC_BOOKE, PPC2_BOOKE206),
6902 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6903 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6904                PPC_BOOKE, PPC2_BOOKE206),
6905 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6906              PPC_440_SPEC),
6907 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6908 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6909 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6910 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6911 #if defined(TARGET_PPC64)
6912 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6913               PPC2_ISA300),
6914 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6915 #endif
6916 
6917 #undef GEN_INT_ARITH_ADD
6918 #undef GEN_INT_ARITH_ADD_CONST
6919 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6920 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6921 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6922                                 add_ca, compute_ca, compute_ov)               \
6923 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6924 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6925 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6926 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6927 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6928 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6929 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6930 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6931 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6932 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
6933 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6934 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
6935 
6936 #undef GEN_INT_ARITH_DIVW
6937 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
6938 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6939 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6940 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6941 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6942 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6943 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6944 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6945 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6946 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6947 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6948 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6949 
6950 #if defined(TARGET_PPC64)
6951 #undef GEN_INT_ARITH_DIVD
6952 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
6953 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6954 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6955 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6956 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6957 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6958 
6959 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6960 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6961 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6962 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6963 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6964 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6965 
6966 #undef GEN_INT_ARITH_MUL_HELPER
6967 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
6968 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6969 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6970 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6971 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6972 #endif
6973 
6974 #undef GEN_INT_ARITH_SUBF
6975 #undef GEN_INT_ARITH_SUBF_CONST
6976 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
6977 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6978 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
6979                                 add_ca, compute_ca, compute_ov)               \
6980 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6981 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6982 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6983 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6984 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6985 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6986 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6987 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6988 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6989 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6990 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6991 
6992 #undef GEN_LOGICAL1
6993 #undef GEN_LOGICAL2
6994 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
6995 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6996 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
6997 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6998 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6999 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
7000 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
7001 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
7002 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
7003 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
7004 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
7005 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
7006 #if defined(TARGET_PPC64)
7007 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
7008 #endif
7009 
7010 #if defined(TARGET_PPC64)
7011 #undef GEN_PPC64_R2
7012 #undef GEN_PPC64_R4
7013 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
7014 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7015 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7016              PPC_64B)
7017 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
7018 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7019 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
7020              PPC_64B),                                                        \
7021 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7022              PPC_64B),                                                        \
7023 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
7024              PPC_64B)
7025 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
7026 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
7027 GEN_PPC64_R4(rldic, 0x1E, 0x04),
7028 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
7029 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
7030 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
7031 #endif
7032 
7033 #undef GEN_LDX_E
7034 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
7035 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
7036 
7037 #if defined(TARGET_PPC64)
7038 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
7039 
7040 /* HV/P7 and later only */
7041 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
7042 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
7043 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
7044 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
7045 #endif
7046 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
7047 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
7048 
7049 /* External PID-based load */
7050 #undef GEN_LDEPX
7051 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
7052 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7053               0x00000001, PPC_NONE, PPC2_BOOKE206),
7054 
7055 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
7056 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
7057 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
7058 #if defined(TARGET_PPC64)
7059 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
7060 #endif
7061 
7062 #undef GEN_STX_E
7063 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
7064 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
7065 
7066 #if defined(TARGET_PPC64)
7067 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
7068 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
7069 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
7070 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
7071 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
7072 #endif
7073 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
7074 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
7075 
7076 #undef GEN_STEPX
7077 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
7078 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7079               0x00000001, PPC_NONE, PPC2_BOOKE206),
7080 
7081 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
7082 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
7083 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
7084 #if defined(TARGET_PPC64)
7085 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
7086 #endif
7087 
7088 #undef GEN_CRLOGIC
7089 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
7090 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
7091 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
7092 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
7093 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
7094 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
7095 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
7096 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
7097 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
7098 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
7099 
7100 #undef GEN_MAC_HANDLER
7101 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
7102 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
7103 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
7104 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
7105 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
7106 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
7107 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
7108 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
7109 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
7110 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
7111 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
7112 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
7113 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
7114 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
7115 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
7116 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
7117 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
7118 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
7119 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
7120 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
7121 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
7122 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
7123 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
7124 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
7125 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
7126 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
7127 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
7128 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
7129 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
7130 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
7131 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
7132 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
7133 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
7134 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
7135 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
7136 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
7137 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
7138 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
7139 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
7140 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
7141 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
7142 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
7143 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
7144 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
7145 
7146 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
7147                PPC_NONE, PPC2_TM),
7148 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
7149                PPC_NONE, PPC2_TM),
7150 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
7151                PPC_NONE, PPC2_TM),
7152 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
7153                PPC_NONE, PPC2_TM),
7154 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
7155                PPC_NONE, PPC2_TM),
7156 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
7157                PPC_NONE, PPC2_TM),
7158 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
7159                PPC_NONE, PPC2_TM),
7160 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
7161                PPC_NONE, PPC2_TM),
7162 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
7163                PPC_NONE, PPC2_TM),
7164 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
7165                PPC_NONE, PPC2_TM),
7166 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
7167                PPC_NONE, PPC2_TM),
7168 
7169 #include "translate/fp-ops.c.inc"
7170 
7171 #include "translate/vmx-ops.c.inc"
7172 
7173 #include "translate/vsx-ops.c.inc"
7174 
7175 #include "translate/spe-ops.c.inc"
7176 };
7177 
7178 /*****************************************************************************/
7179 /* Opcode types */
7180 enum {
7181     PPC_DIRECT   = 0, /* Opcode routine        */
7182     PPC_INDIRECT = 1, /* Indirect opcode table */
7183 };
7184 
7185 #define PPC_OPCODE_MASK 0x3
7186 
7187 static inline int is_indirect_opcode(void *handler)
7188 {
7189     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
7190 }
7191 
7192 static inline opc_handler_t **ind_table(void *handler)
7193 {
7194     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
7195 }
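/*
 * Indirect sub-tables are distinguished from direct handlers by tagging
 * the low bits of the pointer with PPC_INDIRECT (create_new_table() ORs
 * the tag in, ind_table() masks it back off).  This relies on
 * opc_handler_t pointers being at least 4-byte aligned so that the two
 * low bits are free.
 */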
7196 
7197 /* Instruction table creation */
7198 /* Opcode tables creation */
7199 static void fill_new_table(opc_handler_t **table, int len)
7200 {
7201     int i;
7202 
7203     for (i = 0; i < len; i++) {
7204         table[i] = &invalid_handler;
7205     }
7206 }
7207 
7208 static int create_new_table(opc_handler_t **table, unsigned char idx)
7209 {
7210     opc_handler_t **tmp;
7211 
7212     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
7213     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
7214     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
7215 
7216     return 0;
7217 }
7218 
7219 static int insert_in_table(opc_handler_t **table, unsigned char idx,
7220                             opc_handler_t *handler)
7221 {
7222     if (table[idx] != &invalid_handler) {
7223         return -1;
7224     }
7225     table[idx] = handler;
7226 
7227     return 0;
7228 }
7229 
7230 static int register_direct_insn(opc_handler_t **ppc_opcodes,
7231                                 unsigned char idx, opc_handler_t *handler)
7232 {
7233     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
7234         printf("*** ERROR: opcode %02x already assigned in main "
7235                "opcode table\n", idx);
7236         return -1;
7237     }
7238 
7239     return 0;
7240 }
7241 
7242 static int register_ind_in_table(opc_handler_t **table,
7243                                  unsigned char idx1, unsigned char idx2,
7244                                  opc_handler_t *handler)
7245 {
7246     if (table[idx1] == &invalid_handler) {
7247         if (create_new_table(table, idx1) < 0) {
7248             printf("*** ERROR: unable to create indirect table "
7249                    "idx=%02x\n", idx1);
7250             return -1;
7251         }
7252     } else {
7253         if (!is_indirect_opcode(table[idx1])) {
7254             printf("*** ERROR: idx %02x already assigned to a direct "
7255                    "opcode\n", idx1);
7256             return -1;
7257         }
7258     }
7259     if (handler != NULL &&
7260         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
7261         printf("*** ERROR: opcode %02x already assigned in "
7262                "opcode table %02x\n", idx2, idx1);
7263         return -1;
7264     }
7265 
7266     return 0;
7267 }
7268 
7269 static int register_ind_insn(opc_handler_t **ppc_opcodes,
7270                              unsigned char idx1, unsigned char idx2,
7271                              opc_handler_t *handler)
7272 {
7273     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
7274 }
7275 
7276 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
7277                                 unsigned char idx1, unsigned char idx2,
7278                                 unsigned char idx3, opc_handler_t *handler)
7279 {
7280     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7281         printf("*** ERROR: unable to join indirect table idx "
7282                "[%02x-%02x]\n", idx1, idx2);
7283         return -1;
7284     }
7285     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
7286                               handler) < 0) {
7287         printf("*** ERROR: unable to insert opcode "
7288                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7289         return -1;
7290     }
7291 
7292     return 0;
7293 }
7294 
7295 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
7296                                  unsigned char idx1, unsigned char idx2,
7297                                  unsigned char idx3, unsigned char idx4,
7298                                  opc_handler_t *handler)
7299 {
7300     opc_handler_t **table;
7301 
7302     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7303         printf("*** ERROR: unable to join indirect table idx "
7304                "[%02x-%02x]\n", idx1, idx2);
7305         return -1;
7306     }
7307     table = ind_table(ppc_opcodes[idx1]);
7308     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
7309         printf("*** ERROR: unable to join 2nd-level indirect table idx "
7310                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7311         return -1;
7312     }
7313     table = ind_table(table[idx2]);
7314     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
7315         printf("*** ERROR: unable to insert opcode "
7316                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
7317         return -1;
7318     }
7319     return 0;
7320 }
7321 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7322 {
7323     if (insn->opc2 != 0xFF) {
7324         if (insn->opc3 != 0xFF) {
7325             if (insn->opc4 != 0xFF) {
7326                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7327                                           insn->opc3, insn->opc4,
7328                                           &insn->handler) < 0) {
7329                     return -1;
7330                 }
7331             } else {
7332                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7333                                          insn->opc3, &insn->handler) < 0) {
7334                     return -1;
7335                 }
7336             }
7337         } else {
7338             if (register_ind_insn(ppc_opcodes, insn->opc1,
7339                                   insn->opc2, &insn->handler) < 0) {
7340                 return -1;
7341             }
7342         }
7343     } else {
7344         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7345             return -1;
7346         }
7347     }
7348 
7349     return 0;
7350 }
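/*
 * An opc2/opc3/opc4 value of 0xFF means "this level is unused", so an
 * instruction is registered one to four table levels deep depending on
 * how much of its extended opcode needs to be decoded.
 */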
7351 
7352 static int test_opcode_table(opc_handler_t **table, int len)
7353 {
7354     int i, count, tmp;
7355 
7356     for (i = 0, count = 0; i < len; i++) {
7357         /* Consistency fixup */
7358         if (table[i] == NULL) {
7359             table[i] = &invalid_handler;
7360         }
7361         if (table[i] != &invalid_handler) {
7362             if (is_indirect_opcode(table[i])) {
7363                 tmp = test_opcode_table(ind_table(table[i]),
7364                     PPC_CPU_INDIRECT_OPCODES_LEN);
7365                 if (tmp == 0) {
7366                     g_free(ind_table(table[i]));
7367                     table[i] = &invalid_handler;
7368                 } else {
7369                     count++;
7370                 }
7371             } else {
7372                 count++;
7373             }
7374         }
7375     }
7376 
7377     return count;
7378 }
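/*
 * test_opcode_table() returns the number of populated entries and, as a
 * side effect, releases indirect sub-tables that turned out to be empty
 * for this CPU model, pointing their slots back at invalid_handler.
 */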
7379 
7380 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7381 {
7382     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7383         printf("*** WARNING: no opcode defined!\n");
7384     }
7385 }
7386 
7387 /*****************************************************************************/
7388 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7389 {
7390     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7391     opcode_t *opc;
7392 
7393     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7394     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7395         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7396             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7397             if (register_insn(cpu->opcodes, opc) < 0) {
7398                 error_setg(errp, "ERROR initializing PowerPC instruction "
7399                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7400                            opc->opc3);
7401                 return;
7402             }
7403         }
7404     }
7405     fix_opcode_tables(cpu->opcodes);
7406     fflush(stdout);
7407     fflush(stderr);
7408 }
7409 
7410 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7411 {
7412     opc_handler_t **table, **table_2;
7413     int i, j, k;
7414 
7415     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7416         if (cpu->opcodes[i] == &invalid_handler) {
7417             continue;
7418         }
7419         if (is_indirect_opcode(cpu->opcodes[i])) {
7420             table = ind_table(cpu->opcodes[i]);
7421             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7422                 if (table[j] == &invalid_handler) {
7423                     continue;
7424                 }
7425                 if (is_indirect_opcode(table[j])) {
7426                     table_2 = ind_table(table[j]);
7427                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7428                         if (table_2[k] != &invalid_handler &&
7429                             is_indirect_opcode(table_2[k])) {
7430                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7431                                                      ~PPC_INDIRECT));
7432                         }
7433                     }
7434                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7435                                              ~PPC_INDIRECT));
7436                 }
7437             }
7438             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7439                 ~PPC_INDIRECT));
7440         }
7441     }
7442 }
7443 
7444 int ppc_fixup_cpu(PowerPCCPU *cpu)
7445 {
7446     CPUPPCState *env = &cpu->env;
7447 
7448     /*
7449      * TCG doesn't (yet) emulate some groups of instructions that are
7450      * implemented on some otherwise supported CPUs (e.g. VSX and
7451      * decimal floating point instructions on POWER7).  We remove
7452      * unsupported instruction groups from the cpu state's instruction
7453      * masks and hope the guest can cope.  For at least the pseries
7454      * machine, the unavailability of these instructions can be
7455      * advertised to the guest via the device tree.
7456      */
7457     if ((env->insns_flags & ~PPC_TCG_INSNS)
7458         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7459         warn_report("Disabling some instructions which are not "
7460                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7461                     env->insns_flags & ~PPC_TCG_INSNS,
7462                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7463     }
7464     env->insns_flags &= PPC_TCG_INSNS;
7465     env->insns_flags2 &= PPC_TCG_INSNS2;
7466     return 0;
7467 }
7468 
7469 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
7470 {
7471     opc_handler_t **table, *handler;
7472     uint32_t inval;
7473 
7474     ctx->opcode = insn;
7475 
7476     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7477               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7478               ctx->le_mode ? "little" : "big");
7479 
7480     table = cpu->opcodes;
7481     handler = table[opc1(insn)];
7482     if (is_indirect_opcode(handler)) {
7483         table = ind_table(handler);
7484         handler = table[opc2(insn)];
7485         if (is_indirect_opcode(handler)) {
7486             table = ind_table(handler);
7487             handler = table[opc3(insn)];
7488             if (is_indirect_opcode(handler)) {
7489                 table = ind_table(handler);
7490                 handler = table[opc4(insn)];
7491             }
7492         }
7493     }
7494 
7495     /* Is the opcode *really* valid? */
7496     if (unlikely(handler->handler == &gen_invalid)) {
7497         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7498                       "%02x - %02x - %02x - %02x (%08x) "
7499                       TARGET_FMT_lx "\n",
7500                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7501                       insn, ctx->cia);
7502         return false;
7503     }
7504 
7505     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7506                  && Rc(insn))) {
7507         inval = handler->inval2;
7508     } else {
7509         inval = handler->inval1;
7510     }
7511 
7512     if (unlikely((insn & inval) != 0)) {
7513         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7514                       "%02x - %02x - %02x - %02x (%08x) "
7515                       TARGET_FMT_lx "\n", insn & inval,
7516                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7517                       insn, ctx->cia);
7518         return false;
7519     }
7520 
7521     handler->handler(ctx);
7522     return true;
7523 }
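/*
 * Note on the inval masks checked above: they mark opcode bits that must
 * be zero for the encoding to be valid; for SPE-type handlers the mask
 * to apply (inval1 vs inval2) additionally depends on the Rc bit.
 */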
7524 
7525 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
7526 {
7527     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7528     CPUPPCState *env = cs->env_ptr;
7529     uint32_t hflags = ctx->base.tb->flags;
7530 
7531     ctx->spr_cb = env->spr_cb;
7532     ctx->pr = (hflags >> HFLAGS_PR) & 1;
7533     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
7534     ctx->dr = (hflags >> HFLAGS_DR) & 1;
7535     ctx->hv = (hflags >> HFLAGS_HV) & 1;
7536     ctx->insns_flags = env->insns_flags;
7537     ctx->insns_flags2 = env->insns_flags2;
7538     ctx->access_type = -1;
7539     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
7540     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
7541     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
7542     ctx->flags = env->flags;
7543 #if defined(TARGET_PPC64)
7544     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
7545     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7546 #endif
7547     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
7548         || env->mmu_model & POWERPC_MMU_64;
7549 
7550     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
7551     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
7552     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
7553     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
7554     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
7555     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
7556     ctx->hr = (hflags >> HFLAGS_HR) & 1;
7557     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
7558     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
7559     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
7560     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
7561     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
7562 
7563     ctx->singlestep_enabled = 0;
7564     if ((hflags >> HFLAGS_SE) & 1) {
7565         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
7566         ctx->base.max_insns = 1;
7567     }
7568     if ((hflags >> HFLAGS_BE) & 1) {
7569         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
7570     }
7571 }
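/*
 * Nearly all of the per-TB state above is unpacked from tb->flags, the
 * precomputed hflags word, so translation does not need to inspect MSR
 * or MMCR0 directly while decoding.
 */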
7572 
7573 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
7574 {
7575 }
7576 
7577 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
7578 {
7579     tcg_gen_insn_start(dcbase->pc_next);
7580 }
7581 
7582 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
7583 {
7584     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
7585     return opc1(insn) == 1;
7586 }
7587 
7588 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
7589 {
7590     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7591     PowerPCCPU *cpu = POWERPC_CPU(cs);
7592     CPUPPCState *env = cs->env_ptr;
7593     target_ulong pc;
7594     uint32_t insn;
7595     bool ok;
7596 
7597     LOG_DISAS("----------------\n");
7598     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7599               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
7600 
7601     ctx->cia = pc = ctx->base.pc_next;
7602     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
7603     ctx->base.pc_next = pc += 4;
7604 
7605     if (!is_prefix_insn(ctx, insn)) {
7606         ok = (decode_insn32(ctx, insn) ||
7607               decode_legacy(cpu, ctx, insn));
7608     } else if ((pc & 63) == 0) {
7609         /*
7610          * Power ISA v3.1, section 1.9 Exceptions:
7611          * an attempt to execute a prefixed instruction that crosses a
7612          * 64-byte address boundary causes a system alignment error.
7613          */
7614         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
7615         ok = true;
7616     } else {
7617         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
7618                                              need_byteswap(ctx));
7619         ctx->base.pc_next = pc += 4;
7620         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
7621     }
7622     if (!ok) {
7623         gen_invalid(ctx);
7624     }
7625 
7626     /* End the TB when crossing a page boundary. */
7627     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
7628         ctx->base.is_jmp = DISAS_TOO_MANY;
7629     }
7630 
7631     translator_loop_temp_check(&ctx->base);
7632 }
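/*
 * For prefixed (64-bit) instructions the prefix word ends up in bits
 * 63:32 and the suffix in bits 31:0 of the value passed to
 * decode_insn64(), via the deposit64() above.
 */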
7633 
7634 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7635 {
7636     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7637     DisasJumpType is_jmp = ctx->base.is_jmp;
7638     target_ulong nip = ctx->base.pc_next;
7639 
7640     if (is_jmp == DISAS_NORETURN) {
7641         /* We have already exited the TB. */
7642         return;
7643     }
7644 
7645     /* Honor single stepping. */
7646     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7647         && (nip <= 0x100 || nip > 0xf00)) {
7648         switch (is_jmp) {
7649         case DISAS_TOO_MANY:
7650         case DISAS_EXIT_UPDATE:
7651         case DISAS_CHAIN_UPDATE:
7652             gen_update_nip(ctx, nip);
7653             break;
7654         case DISAS_EXIT:
7655         case DISAS_CHAIN:
7656             break;
7657         default:
7658             g_assert_not_reached();
7659         }
7660 
7661         gen_debug_exception(ctx);
7662         return;
7663     }
7664 
7665     switch (is_jmp) {
7666     case DISAS_TOO_MANY:
7667         if (use_goto_tb(ctx, nip)) {
7668             pmu_count_insns(ctx);
7669             tcg_gen_goto_tb(0);
7670             gen_update_nip(ctx, nip);
7671             tcg_gen_exit_tb(ctx->base.tb, 0);
7672             break;
7673         }
7674         /* fall through */
7675     case DISAS_CHAIN_UPDATE:
7676         gen_update_nip(ctx, nip);
7677         /* fall through */
7678     case DISAS_CHAIN:
7679         /*
7680          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7681          * CF_NO_GOTO_PTR is set. Count insns now.
7682          */
7683         if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
7684             pmu_count_insns(ctx);
7685         }
7686 
7687         tcg_gen_lookup_and_goto_ptr();
7688         break;
7689 
7690     case DISAS_EXIT_UPDATE:
7691         gen_update_nip(ctx, nip);
7692         /* fall through */
7693     case DISAS_EXIT:
7694         pmu_count_insns(ctx);
7695         tcg_gen_exit_tb(NULL, 0);
7696         break;
7697 
7698     default:
7699         g_assert_not_reached();
7700     }
7701 }
7702 
7703 static void ppc_tr_disas_log(const DisasContextBase *dcbase,
7704                              CPUState *cs, FILE *logfile)
7705 {
7706     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
7707     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
7708 }
7709 
7710 static const TranslatorOps ppc_tr_ops = {
7711     .init_disas_context = ppc_tr_init_disas_context,
7712     .tb_start           = ppc_tr_tb_start,
7713     .insn_start         = ppc_tr_insn_start,
7714     .translate_insn     = ppc_tr_translate_insn,
7715     .tb_stop            = ppc_tr_tb_stop,
7716     .disas_log          = ppc_tr_disas_log,
7717 };
7718 
7719 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
7720                            target_ulong pc, void *host_pc)
7721 {
7722     DisasContext ctx;
7723 
7724     translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
7725 }
7726