xref: /openbmc/qemu/target/ppc/translate.c (revision 37b0b24e)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 #include "power8-pmu.h"
40 
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43 
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 
47 /* Include definitions for instruction classes and implementation flags */
48 /* #define PPC_DEBUG_DISAS */
49 
50 #ifdef PPC_DEBUG_DISAS
51 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
52 #else
53 #  define LOG_DISAS(...) do { } while (0)
54 #endif
55 /*****************************************************************************/
56 /* Code translation helpers                                                  */
57 
58 /* global register indexes */
59 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
60                           + 10 * 4 + 22 * 5 /* SPE GPRh */
61                           + 8 * 5           /* CRF */];
62 static TCGv cpu_gpr[32];
63 static TCGv cpu_gprh[32];
64 static TCGv_i32 cpu_crf[8];
65 static TCGv cpu_nip;
66 static TCGv cpu_msr;
67 static TCGv cpu_ctr;
68 static TCGv cpu_lr;
69 #if defined(TARGET_PPC64)
70 static TCGv cpu_cfar;
71 #endif
72 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
73 static TCGv cpu_reserve;
74 static TCGv cpu_reserve_val;
75 static TCGv cpu_fpscr;
76 static TCGv_i32 cpu_access_type;
77 
78 #include "exec/gen-icount.h"
79 
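/*
 * Allocate the TCG globals declared above, each backed by its CPUPPCState
 * field, so that generated code can access the guest registers and flags
 * directly.
 */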
80 void ppc_translate_init(void)
81 {
82     int i;
83     char *p;
84     size_t cpu_reg_names_size;
85 
86     p = cpu_reg_names;
87     cpu_reg_names_size = sizeof(cpu_reg_names);
88 
89     for (i = 0; i < 8; i++) {
90         snprintf(p, cpu_reg_names_size, "crf%d", i);
91         cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
92                                             offsetof(CPUPPCState, crf[i]), p);
93         p += 5;
94         cpu_reg_names_size -= 5;
95     }
96 
97     for (i = 0; i < 32; i++) {
98         snprintf(p, cpu_reg_names_size, "r%d", i);
99         cpu_gpr[i] = tcg_global_mem_new(cpu_env,
100                                         offsetof(CPUPPCState, gpr[i]), p);
101         p += (i < 10) ? 3 : 4;
102         cpu_reg_names_size -= (i < 10) ? 3 : 4;
103         snprintf(p, cpu_reg_names_size, "r%dH", i);
104         cpu_gprh[i] = tcg_global_mem_new(cpu_env,
105                                          offsetof(CPUPPCState, gprh[i]), p);
106         p += (i < 10) ? 4 : 5;
107         cpu_reg_names_size -= (i < 10) ? 4 : 5;
108     }
109 
110     cpu_nip = tcg_global_mem_new(cpu_env,
111                                  offsetof(CPUPPCState, nip), "nip");
112 
113     cpu_msr = tcg_global_mem_new(cpu_env,
114                                  offsetof(CPUPPCState, msr), "msr");
115 
116     cpu_ctr = tcg_global_mem_new(cpu_env,
117                                  offsetof(CPUPPCState, ctr), "ctr");
118 
119     cpu_lr = tcg_global_mem_new(cpu_env,
120                                 offsetof(CPUPPCState, lr), "lr");
121 
122 #if defined(TARGET_PPC64)
123     cpu_cfar = tcg_global_mem_new(cpu_env,
124                                   offsetof(CPUPPCState, cfar), "cfar");
125 #endif
126 
127     cpu_xer = tcg_global_mem_new(cpu_env,
128                                  offsetof(CPUPPCState, xer), "xer");
129     cpu_so = tcg_global_mem_new(cpu_env,
130                                 offsetof(CPUPPCState, so), "SO");
131     cpu_ov = tcg_global_mem_new(cpu_env,
132                                 offsetof(CPUPPCState, ov), "OV");
133     cpu_ca = tcg_global_mem_new(cpu_env,
134                                 offsetof(CPUPPCState, ca), "CA");
135     cpu_ov32 = tcg_global_mem_new(cpu_env,
136                                   offsetof(CPUPPCState, ov32), "OV32");
137     cpu_ca32 = tcg_global_mem_new(cpu_env,
138                                   offsetof(CPUPPCState, ca32), "CA32");
139 
140     cpu_reserve = tcg_global_mem_new(cpu_env,
141                                      offsetof(CPUPPCState, reserve_addr),
142                                      "reserve_addr");
143     cpu_reserve_val = tcg_global_mem_new(cpu_env,
144                                      offsetof(CPUPPCState, reserve_val),
145                                      "reserve_val");
146 
147     cpu_fpscr = tcg_global_mem_new(cpu_env,
148                                    offsetof(CPUPPCState, fpscr), "fpscr");
149 
150     cpu_access_type = tcg_global_mem_new_i32(cpu_env,
151                                              offsetof(CPUPPCState, access_type),
152                                              "access_type");
153 }
154 
155 /* internal translator state */
156 struct DisasContext {
157     DisasContextBase base;
158     target_ulong cia;  /* current instruction address */
159     uint32_t opcode;
160     /* Execution mode and memory-access state */
161     bool pr, hv, dr, le_mode;
162     bool lazy_tlb_flush;
163     bool need_access_type;
164     int mem_idx;
165     int access_type;
166     /* Translation flags */
167     MemOp default_tcg_memop_mask;
168 #if defined(TARGET_PPC64)
169     bool sf_mode;
170     bool has_cfar;
171 #endif
172     bool fpu_enabled;
173     bool altivec_enabled;
174     bool vsx_enabled;
175     bool spe_enabled;
176     bool tm_enabled;
177     bool gtse;
178     bool hr;
179     bool mmcr0_pmcc0;
180     bool mmcr0_pmcc1;
181     bool mmcr0_pmcjce;
182     bool pmc_other;
183     bool pmu_insn_cnt;
184     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
185     int singlestep_enabled;
186     uint32_t flags;
187     uint64_t insns_flags;
188     uint64_t insns_flags2;
189 };
190 
191 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
192 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
193 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
194 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
195 
196 /* Return true iff byteswap is needed in a scalar memop */
197 static inline bool need_byteswap(const DisasContext *ctx)
198 {
199 #if TARGET_BIG_ENDIAN
200     return ctx->le_mode;
201 #else
202     return !ctx->le_mode;
203 #endif
204 }
205 
206 /* True when active word size < size of target_long.  */
207 #ifdef TARGET_PPC64
208 # define NARROW_MODE(C)  (!(C)->sf_mode)
209 #else
210 # define NARROW_MODE(C)  0
211 #endif
212 
213 struct opc_handler_t {
214     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
215     uint32_t inval1;
216     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
217     uint32_t inval2;
218     /* instruction type */
219     uint64_t type;
220     /* extended instruction type */
221     uint64_t type2;
222     /* handler */
223     void (*handler)(DisasContext *ctx);
224 };
225 
226 /* SPR load/store helpers: raw access to env->spr[] with no side effects */
227 static inline void gen_load_spr(TCGv t, int reg)
228 {
229     tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
230 }
231 
232 static inline void gen_store_spr(int reg, TCGv t)
233 {
234     tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
235 }
236 
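/*
 * Record the type of the access being generated in env->access_type, but
 * only emit the store when the value changes (and only if the CPU model
 * needs it).
 */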
237 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
238 {
239     if (ctx->need_access_type && ctx->access_type != access_type) {
240         tcg_gen_movi_i32(cpu_access_type, access_type);
241         ctx->access_type = access_type;
242     }
243 }
244 
245 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
246 {
247     if (NARROW_MODE(ctx)) {
248         nip = (uint32_t)nip;
249     }
250     tcg_gen_movi_tl(cpu_nip, nip);
251 }
252 
253 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
254 {
255     TCGv_i32 t0, t1;
256 
257     /*
258      * These are all synchronous exceptions; we set the PC back to
259      * the faulting instruction.
260      */
261     gen_update_nip(ctx, ctx->cia);
262     t0 = tcg_const_i32(excp);
263     t1 = tcg_const_i32(error);
264     gen_helper_raise_exception_err(cpu_env, t0, t1);
265     tcg_temp_free_i32(t0);
266     tcg_temp_free_i32(t1);
267     ctx->base.is_jmp = DISAS_NORETURN;
268 }
269 
270 static void gen_exception(DisasContext *ctx, uint32_t excp)
271 {
272     TCGv_i32 t0;
273 
274     /*
275      * These are all synchronous exceptions; we set the PC back to
276      * the faulting instruction.
277      */
278     gen_update_nip(ctx, ctx->cia);
279     t0 = tcg_const_i32(excp);
280     gen_helper_raise_exception(cpu_env, t0);
281     tcg_temp_free_i32(t0);
282     ctx->base.is_jmp = DISAS_NORETURN;
283 }
284 
285 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
286                               target_ulong nip)
287 {
288     TCGv_i32 t0;
289 
290     gen_update_nip(ctx, nip);
291     t0 = tcg_const_i32(excp);
292     gen_helper_raise_exception(cpu_env, t0);
293     tcg_temp_free_i32(t0);
294     ctx->base.is_jmp = DISAS_NORETURN;
295 }
296 
297 static void gen_icount_io_start(DisasContext *ctx)
298 {
299     if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
300         gen_io_start();
301         /*
302          * An I/O instruction must be last in the TB.
303          * Chain to the next TB, and let the code from gen_tb_start
304          * decide if we need to return to the main loop.
305          * Doing this first also allows this value to be overridden.
306          */
307         ctx->base.is_jmp = DISAS_TOO_MANY;
308     }
309 }
310 
311 #if !defined(CONFIG_USER_ONLY)
312 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
313 {
314     gen_icount_io_start(ctx);
315     gen_helper_ppc_maybe_interrupt(cpu_env);
316 }
317 #endif
318 
319 /*
320  * Return the appropriate exception for the caller to generate and
321  * prepare the SPR registers for that exception.
322  *
323  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
324  * POWERPC_EXCP_DEBUG (on BookE).
325  */
326 static uint32_t gen_prep_dbgex(DisasContext *ctx)
327 {
328     if (ctx->flags & POWERPC_FLAG_DE) {
329         target_ulong dbsr = 0;
330         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
331             dbsr = DBCR0_ICMP;
332         } else {
333             /* Must have been a branch */
334             dbsr = DBCR0_BRT;
335         }
336         TCGv t0 = tcg_temp_new();
337         gen_load_spr(t0, SPR_BOOKE_DBSR);
338         tcg_gen_ori_tl(t0, t0, dbsr);
339         gen_store_spr(SPR_BOOKE_DBSR, t0);
340         tcg_temp_free(t0);
341         return POWERPC_EXCP_DEBUG;
342     } else {
343         return POWERPC_EXCP_TRACE;
344     }
345 }
346 
347 static void gen_debug_exception(DisasContext *ctx)
348 {
349     gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
350     ctx->base.is_jmp = DISAS_NORETURN;
351 }
352 
353 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
354 {
355     /* Will be converted to program check if needed */
356     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
357 }
358 
359 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
360 {
361     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
362 }
363 
364 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
365 {
366     /* Will be converted to program check if needed */
367     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
368 }
369 
370 /*****************************************************************************/
371 /* SPR READ/WRITE CALLBACKS */
372 
373 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
374 {
375 #if 0
376     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
377     printf("ERROR: trying to access SPR %d!\n", sprn);
378 #endif
379 }
380 
381 /* #define PPC_DUMP_SPR_ACCESSES */
382 
383 /*
384  * Generic callbacks:
385  * do nothing but store/retrieve the SPR value
386  */
387 static void spr_load_dump_spr(int sprn)
388 {
389 #ifdef PPC_DUMP_SPR_ACCESSES
390     TCGv_i32 t0 = tcg_const_i32(sprn);
391     gen_helper_load_dump_spr(cpu_env, t0);
392     tcg_temp_free_i32(t0);
393 #endif
394 }
395 
396 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
397 {
398     gen_load_spr(cpu_gpr[gprn], sprn);
399     spr_load_dump_spr(sprn);
400 }
401 
402 static void spr_store_dump_spr(int sprn)
403 {
404 #ifdef PPC_DUMP_SPR_ACCESSES
405     TCGv_i32 t0 = tcg_const_i32(sprn);
406     gen_helper_store_dump_spr(cpu_env, t0);
407     tcg_temp_free_i32(t0);
408 #endif
409 }
410 
411 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
412 {
413     gen_store_spr(sprn, cpu_gpr[gprn]);
414     spr_store_dump_spr(sprn);
415 }
416 
417 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
418 {
419     spr_write_generic(ctx, sprn, gprn);
420 
421     /*
422      * SPR_CTRL writes must force a new translation block so
423      * that the PMU can account for run-latch events more
424      * accurately.
425      */
426     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
427 }
428 
429 #if !defined(CONFIG_USER_ONLY)
430 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
431 {
432 #ifdef TARGET_PPC64
433     TCGv t0 = tcg_temp_new();
434     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
435     gen_store_spr(sprn, t0);
436     tcg_temp_free(t0);
437     spr_store_dump_spr(sprn);
438 #else
439     spr_write_generic(ctx, sprn, gprn);
440 #endif
441 }
442 
443 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
444 {
445     TCGv t0 = tcg_temp_new();
446     TCGv t1 = tcg_temp_new();
447     gen_load_spr(t0, sprn);
448     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
449     tcg_gen_and_tl(t0, t0, t1);
450     gen_store_spr(sprn, t0);
451     tcg_temp_free(t0);
452     tcg_temp_free(t1);
453 }
454 
455 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
456 {
457 }
458 
459 #endif
460 
461 /* SPR common to all PowerPC */
462 /* XER */
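/*
 * XER is stored split across cpu_xer plus the individual SO/OV/CA (and
 * OV32/CA32 on ISA v3.00) globals; a read reassembles the architected value
 * and a write scatters it back into the separate fields.
 */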
463 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
464 {
465     TCGv dst = cpu_gpr[gprn];
466     TCGv t0 = tcg_temp_new();
467     TCGv t1 = tcg_temp_new();
468     TCGv t2 = tcg_temp_new();
469     tcg_gen_mov_tl(dst, cpu_xer);
470     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
471     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
472     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
473     tcg_gen_or_tl(t0, t0, t1);
474     tcg_gen_or_tl(dst, dst, t2);
475     tcg_gen_or_tl(dst, dst, t0);
476     if (is_isa300(ctx)) {
477         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
478         tcg_gen_or_tl(dst, dst, t0);
479         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
480         tcg_gen_or_tl(dst, dst, t0);
481     }
482     tcg_temp_free(t0);
483     tcg_temp_free(t1);
484     tcg_temp_free(t2);
485 }
486 
487 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
488 {
489     TCGv src = cpu_gpr[gprn];
490     /* Write all flags; the read side checks for isa300 */
491     tcg_gen_andi_tl(cpu_xer, src,
492                     ~((1u << XER_SO) |
493                       (1u << XER_OV) | (1u << XER_OV32) |
494                       (1u << XER_CA) | (1u << XER_CA32)));
495     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
496     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
497     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
498     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
499     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
500 }
501 
502 /* LR */
503 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
504 {
505     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
506 }
507 
508 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
509 {
510     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
511 }
512 
513 /* CFAR */
514 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
515 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
516 {
517     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
518 }
519 
520 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
521 {
522     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
523 }
524 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
525 
526 /* CTR */
527 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
528 {
529     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
530 }
531 
532 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
533 {
534     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
535 }
536 
537 /* User read access to SPR */
538 /* USPRx */
539 /* UMMCRx */
540 /* UPMCx */
541 /* USIA */
542 /* UDECR */
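/* A user-level SPR accesses the privileged SPR 16 above it, hence sprn + 0x10. */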
543 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
544 {
545     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
546 }
547 
548 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
549 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
550 {
551     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
552 }
553 #endif
554 
555 /* SPR common to all non-embedded PowerPC */
556 /* DECR */
557 #if !defined(CONFIG_USER_ONLY)
558 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
559 {
560     gen_icount_io_start(ctx);
561     gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
562 }
563 
564 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
565 {
566     gen_icount_io_start(ctx);
567     gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
568 }
569 #endif
570 
571 /* SPR common to all non-embedded PowerPC, except 601 */
572 /* Time base */
573 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
574 {
575     gen_icount_io_start(ctx);
576     gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
577 }
578 
579 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
580 {
581     gen_icount_io_start(ctx);
582     gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
583 }
584 
585 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
586 {
587     gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
588 }
589 
590 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
591 {
592     gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
593 }
594 
595 #if !defined(CONFIG_USER_ONLY)
596 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
597 {
598     gen_icount_io_start(ctx);
599     gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
600 }
601 
602 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
603 {
604     gen_icount_io_start(ctx);
605     gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
606 }
607 
608 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
609 {
610     gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
611 }
612 
613 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
614 {
615     gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
616 }
617 
618 #if defined(TARGET_PPC64)
619 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
620 {
621     gen_icount_io_start(ctx);
622     gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
623 }
624 
625 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
626 {
627     gen_icount_io_start(ctx);
628     gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
629 }
630 
631 /* HDECR */
632 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
633 {
634     gen_icount_io_start(ctx);
635     gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
636 }
637 
638 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
639 {
640     gen_icount_io_start(ctx);
641     gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
642 }
643 
644 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
645 {
646     gen_icount_io_start(ctx);
647     gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
648 }
649 
650 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
651 {
652     gen_icount_io_start(ctx);
653     gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
654 }
655 
656 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
657 {
658     gen_icount_io_start(ctx);
659     gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
660 }
661 
662 #endif
663 #endif
664 
665 #if !defined(CONFIG_USER_ONLY)
666 /* IBAT0U...IBAT7U */
667 /* IBAT0L...IBAT7L */
668 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
669 {
670     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
671                   offsetof(CPUPPCState,
672                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
673 }
674 
675 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
676 {
677     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
678                   offsetof(CPUPPCState,
679                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
680 }
681 
682 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
683 {
684     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
685     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
686     tcg_temp_free_i32(t0);
687 }
688 
689 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
690 {
691     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
692     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
693     tcg_temp_free_i32(t0);
694 }
695 
696 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
697 {
698     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
699     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
700     tcg_temp_free_i32(t0);
701 }
702 
703 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
704 {
705     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
706     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
707     tcg_temp_free_i32(t0);
708 }
709 
710 /* DBAT0U...DBAT7U */
711 /* DBAT0L...DBAT7L */
712 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
713 {
714     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
715                   offsetof(CPUPPCState,
716                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
717 }
718 
719 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
720 {
721     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
722                   offsetof(CPUPPCState,
723                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
724 }
725 
726 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
727 {
728     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
729     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
730     tcg_temp_free_i32(t0);
731 }
732 
733 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
734 {
735     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
736     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
737     tcg_temp_free_i32(t0);
738 }
739 
740 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
741 {
742     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
743     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
744     tcg_temp_free_i32(t0);
745 }
746 
747 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
748 {
749     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
750     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
751     tcg_temp_free_i32(t0);
752 }
753 
754 /* SDR1 */
755 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
756 {
757     gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
758 }
759 
760 #if defined(TARGET_PPC64)
761 /* 64-bit PowerPC specific SPRs */
762 /* PIDR */
763 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
764 {
765     gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
766 }
767 
768 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
769 {
770     gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
771 }
772 
773 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
774 {
775     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
776 }
777 
778 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
779 {
780     TCGv t0 = tcg_temp_new();
781     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
782     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
783     tcg_temp_free(t0);
784 }
785 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
786 {
787     gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
788 }
789 
790 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
791 {
792     gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
793 }
794 
795 /* DPDES */
796 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
797 {
798     gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
799 }
800 
801 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
802 {
803     gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
804 }
805 #endif
806 #endif
807 
808 /* PowerPC 40x specific registers */
809 #if !defined(CONFIG_USER_ONLY)
810 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
811 {
812     gen_icount_io_start(ctx);
813     gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
814 }
815 
816 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
817 {
818     gen_icount_io_start(ctx);
819     gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
820 }
821 
822 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
823 {
824     gen_icount_io_start(ctx);
825     gen_store_spr(sprn, cpu_gpr[gprn]);
826     gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
827     /* We must stop translation as we may have rebooted */
828     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
829 }
830 
831 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
832 {
833     gen_icount_io_start(ctx);
834     gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
835 }
836 
837 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
838 {
839     gen_icount_io_start(ctx);
840     gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
841 }
842 
843 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
844 {
845     gen_icount_io_start(ctx);
846     gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
847 }
848 
849 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
850 {
851     TCGv t0 = tcg_temp_new();
852     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
853     gen_helper_store_40x_pid(cpu_env, t0);
854     tcg_temp_free(t0);
855 }
856 
857 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
858 {
859     gen_icount_io_start(ctx);
860     gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
861 }
862 
863 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
864 {
865     gen_icount_io_start(ctx);
866     gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
867 }
868 #endif
869 
870 /* PIR */
871 #if !defined(CONFIG_USER_ONLY)
872 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
873 {
874     TCGv t0 = tcg_temp_new();
875     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
876     gen_store_spr(SPR_PIR, t0);
877     tcg_temp_free(t0);
878 }
879 #endif
880 
881 /* SPE specific registers */
882 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
883 {
884     TCGv_i32 t0 = tcg_temp_new_i32();
885     tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
886     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
887     tcg_temp_free_i32(t0);
888 }
889 
890 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
891 {
892     TCGv_i32 t0 = tcg_temp_new_i32();
893     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
894     tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
895     tcg_temp_free_i32(t0);
896 }
897 
898 #if !defined(CONFIG_USER_ONLY)
899 /* Callback used to write the exception vector base */
900 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
901 {
902     TCGv t0 = tcg_temp_new();
903     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
904     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
905     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
906     gen_store_spr(sprn, t0);
907     tcg_temp_free(t0);
908 }
909 
910 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
911 {
912     int sprn_offs;
913 
914     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
915         sprn_offs = sprn - SPR_BOOKE_IVOR0;
916     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
917         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
918     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
919         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
920     } else {
921         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
922                       " vector 0x%03x\n", sprn);
923         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
924         return;
925     }
926 
927     TCGv t0 = tcg_temp_new();
928     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
929     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
930     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
931     gen_store_spr(sprn, t0);
932     tcg_temp_free(t0);
933 }
934 #endif
935 
936 #ifdef TARGET_PPC64
937 #ifndef CONFIG_USER_ONLY
938 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
939 {
940     TCGv t0 = tcg_temp_new();
941     TCGv t1 = tcg_temp_new();
942     TCGv t2 = tcg_temp_new();
943 
944     /*
945      * Note, the HV=1 PR=0 case is handled earlier by simply using
946      * spr_write_generic for HV mode in the SPR table
947      */
948 
949     /* Build insertion mask into t1 based on context */
950     if (ctx->pr) {
951         gen_load_spr(t1, SPR_UAMOR);
952     } else {
953         gen_load_spr(t1, SPR_AMOR);
954     }
955 
956     /* Mask new bits into t2 */
957     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
958 
959     /* Load AMR and clear new bits in t0 */
960     gen_load_spr(t0, SPR_AMR);
961     tcg_gen_andc_tl(t0, t0, t1);
962 
963     /* OR in the new bits and write it out */
964     tcg_gen_or_tl(t0, t0, t2);
965     gen_store_spr(SPR_AMR, t0);
966     spr_store_dump_spr(SPR_AMR);
967 
968     tcg_temp_free(t0);
969     tcg_temp_free(t1);
970     tcg_temp_free(t2);
971 }
972 
973 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
974 {
975     TCGv t0 = tcg_temp_new();
976     TCGv t1 = tcg_temp_new();
977     TCGv t2 = tcg_temp_new();
978 
979     /*
980      * Note, the HV=1 case is handled earlier by simply using
981      * spr_write_generic for HV mode in the SPR table
982      */
983 
984     /* Build insertion mask into t1 based on context */
985     gen_load_spr(t1, SPR_AMOR);
986 
987     /* Mask new bits into t2 */
988     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
989 
990     /* Load AMR and clear new bits in t0 */
991     gen_load_spr(t0, SPR_UAMOR);
992     tcg_gen_andc_tl(t0, t0, t1);
993 
994     /* OR in the new bits and write it out */
995     tcg_gen_or_tl(t0, t0, t2);
996     gen_store_spr(SPR_UAMOR, t0);
997     spr_store_dump_spr(SPR_UAMOR);
998 
999     tcg_temp_free(t0);
1000     tcg_temp_free(t1);
1001     tcg_temp_free(t2);
1002 }
1003 
1004 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1005 {
1006     TCGv t0 = tcg_temp_new();
1007     TCGv t1 = tcg_temp_new();
1008     TCGv t2 = tcg_temp_new();
1009 
1010     /*
1011      * Note, the HV=1 case is handled earlier by simply using
1012      * spr_write_generic for HV mode in the SPR table
1013      */
1014 
1015     /* Build insertion mask into t1 based on context */
1016     gen_load_spr(t1, SPR_AMOR);
1017 
1018     /* Mask new bits into t2 */
1019     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1020 
1021     /* Load AMR and clear new bits in t0 */
1022     gen_load_spr(t0, SPR_IAMR);
1023     tcg_gen_andc_tl(t0, t0, t1);
1024 
1025     /* OR in the new bits and write it out */
1026     tcg_gen_or_tl(t0, t0, t2);
1027     gen_store_spr(SPR_IAMR, t0);
1028     spr_store_dump_spr(SPR_IAMR);
1029 
1030     tcg_temp_free(t0);
1031     tcg_temp_free(t1);
1032     tcg_temp_free(t2);
1033 }
1034 #endif
1035 #endif
1036 
1037 #ifndef CONFIG_USER_ONLY
1038 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1039 {
1040     gen_helper_fixup_thrm(cpu_env);
1041     gen_load_spr(cpu_gpr[gprn], sprn);
1042     spr_load_dump_spr(sprn);
1043 }
1044 #endif /* !CONFIG_USER_ONLY */
1045 
1046 #if !defined(CONFIG_USER_ONLY)
1047 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1048 {
1049     TCGv t0 = tcg_temp_new();
1050 
1051     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1052     gen_store_spr(sprn, t0);
1053     tcg_temp_free(t0);
1054 }
1055 
1056 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1057 {
1058     TCGv t0 = tcg_temp_new();
1059 
1060     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1061     gen_store_spr(sprn, t0);
1062     tcg_temp_free(t0);
1063 }
1064 
1065 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1066 {
1067     TCGv t0 = tcg_temp_new();
1068 
1069     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1070                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1071     gen_store_spr(sprn, t0);
1072     tcg_temp_free(t0);
1073 }
1074 
1075 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1076 {
1077     gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1078 }
1079 
1080 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1081 {
1082     TCGv_i32 t0 = tcg_const_i32(sprn);
1083     gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1084     tcg_temp_free_i32(t0);
1085 }
1086 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1087 {
1088     gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1089 }
1090 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1091 {
1092     gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1093 }
1094 
1095 #endif
1096 
1097 #if !defined(CONFIG_USER_ONLY)
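/*
 * MAS7_MAS3 is a 64-bit view whose upper 32 bits are MAS7 and lower 32 bits
 * are MAS3; reads and writes split/join the two underlying SPRs.
 */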
1098 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1099 {
1100     TCGv val = tcg_temp_new();
1101     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1102     gen_store_spr(SPR_BOOKE_MAS3, val);
1103     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1104     gen_store_spr(SPR_BOOKE_MAS7, val);
1105     tcg_temp_free(val);
1106 }
1107 
1108 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1109 {
1110     TCGv mas7 = tcg_temp_new();
1111     TCGv mas3 = tcg_temp_new();
1112     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1113     tcg_gen_shli_tl(mas7, mas7, 32);
1114     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1115     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1116     tcg_temp_free(mas3);
1117     tcg_temp_free(mas7);
1118 }
1119 
1120 #endif
1121 
1122 #ifdef TARGET_PPC64
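/*
 * Facility checks for SPR access: the helpers test the given FSCR bit (or,
 * for gen_msr_facility_check below, the MSR bit) at run time and raise an
 * exception when the facility is not available.
 */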
1123 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1124                                     int bit, int sprn, int cause)
1125 {
1126     TCGv_i32 t1 = tcg_const_i32(bit);
1127     TCGv_i32 t2 = tcg_const_i32(sprn);
1128     TCGv_i32 t3 = tcg_const_i32(cause);
1129 
1130     gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1131 
1132     tcg_temp_free_i32(t3);
1133     tcg_temp_free_i32(t2);
1134     tcg_temp_free_i32(t1);
1135 }
1136 
1137 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1138                                    int bit, int sprn, int cause)
1139 {
1140     TCGv_i32 t1 = tcg_const_i32(bit);
1141     TCGv_i32 t2 = tcg_const_i32(sprn);
1142     TCGv_i32 t3 = tcg_const_i32(cause);
1143 
1144     gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1145 
1146     tcg_temp_free_i32(t3);
1147     tcg_temp_free_i32(t2);
1148     tcg_temp_free_i32(t1);
1149 }
1150 
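/*
 * Access the upper 32 bits of the SPR one below sprn; used to implement the
 * 32-bit "upper half" SPR aliases (the TM and EBB registers below).
 */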
1151 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1152 {
1153     TCGv spr_up = tcg_temp_new();
1154     TCGv spr = tcg_temp_new();
1155 
1156     gen_load_spr(spr, sprn - 1);
1157     tcg_gen_shri_tl(spr_up, spr, 32);
1158     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1159 
1160     tcg_temp_free(spr);
1161     tcg_temp_free(spr_up);
1162 }
1163 
1164 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1165 {
1166     TCGv spr = tcg_temp_new();
1167 
1168     gen_load_spr(spr, sprn - 1);
1169     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1170     gen_store_spr(sprn - 1, spr);
1171 
1172     tcg_temp_free(spr);
1173 }
1174 
1175 #if !defined(CONFIG_USER_ONLY)
1176 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1177 {
1178     TCGv hmer = tcg_temp_new();
1179 
1180     gen_load_spr(hmer, sprn);
1181     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1182     gen_store_spr(sprn, hmer);
1183     spr_store_dump_spr(sprn);
1184     tcg_temp_free(hmer);
1185 }
1186 
1187 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1188 {
1189     gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1190 }
1191 #endif /* !defined(CONFIG_USER_ONLY) */
1192 
1193 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1194 {
1195     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1196     spr_read_generic(ctx, gprn, sprn);
1197 }
1198 
1199 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1200 {
1201     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1202     spr_write_generic(ctx, sprn, gprn);
1203 }
1204 
1205 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1206 {
1207     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1208     spr_read_generic(ctx, gprn, sprn);
1209 }
1210 
1211 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1212 {
1213     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1214     spr_write_generic(ctx, sprn, gprn);
1215 }
1216 
1217 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1218 {
1219     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1220     spr_read_prev_upper32(ctx, gprn, sprn);
1221 }
1222 
1223 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1224 {
1225     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1226     spr_write_prev_upper32(ctx, sprn, gprn);
1227 }
1228 
1229 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1230 {
1231     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1232     spr_read_generic(ctx, gprn, sprn);
1233 }
1234 
1235 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1236 {
1237     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1238     spr_write_generic(ctx, sprn, gprn);
1239 }
1240 
1241 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1242 {
1243     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1244     spr_read_prev_upper32(ctx, gprn, sprn);
1245 }
1246 
1247 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1248 {
1249     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1250     spr_write_prev_upper32(ctx, sprn, gprn);
1251 }
1252 
1253 void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1254 {
1255     TCGv t0 = tcg_temp_new();
1256 
1257     /*
1258      * Access to the (H)DEXCR in problem state uses separate SPR indexes,
1259      * 16 below the SPR indexes that give full access to the (H)DEXCR in
1260      * privileged state. Problem state can only read bits 32:63; bits 0:31
1261      * return 0.
1262      *
1263      * See section 9.3.1-9.3.2 of PowerISA v3.1B
1264      */
1265 
1266     gen_load_spr(t0, sprn + 16);
1267     tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1268 
1269     tcg_temp_free(t0);
1270 }
1271 #endif
1272 
1273 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1274 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1275 
1276 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1277 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1278 
1279 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1280 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1281 
1282 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1283 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1284 
1285 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1286 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1287 
1288 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1289 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1290 
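/* One opcode-table entry: the opcode fields, the handler and the opcode name. */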
1291 typedef struct opcode_t {
1292     unsigned char opc1, opc2, opc3, opc4;
1293 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1294     unsigned char pad[4];
1295 #endif
1296     opc_handler_t handler;
1297     const char *oname;
1298 } opcode_t;
1299 
1300 static void gen_priv_opc(DisasContext *ctx)
1301 {
1302     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1303 }
1304 
1305 /* Helpers for priv. check */
1306 #define GEN_PRIV(CTX)              \
1307     do {                           \
1308         gen_priv_opc(CTX); return; \
1309     } while (0)
1310 
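/*
 * Privilege checks: CHK_SV requires supervisor state (PR=0), CHK_HV requires
 * hypervisor state (PR=0 and HV=1), and CHK_HVRM additionally requires data
 * relocation to be off (hypervisor real mode). In user-only builds they all
 * raise the privileged-instruction exception.
 */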
1311 #if defined(CONFIG_USER_ONLY)
1312 #define CHK_HV(CTX) GEN_PRIV(CTX)
1313 #define CHK_SV(CTX) GEN_PRIV(CTX)
1314 #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1315 #else
1316 #define CHK_HV(CTX)                         \
1317     do {                                    \
1318         if (unlikely(ctx->pr || !ctx->hv)) {\
1319             GEN_PRIV(CTX);                  \
1320         }                                   \
1321     } while (0)
1322 #define CHK_SV(CTX)              \
1323     do {                         \
1324         if (unlikely(ctx->pr)) { \
1325             GEN_PRIV(CTX);       \
1326         }                        \
1327     } while (0)
1328 #define CHK_HVRM(CTX)                                   \
1329     do {                                                \
1330         if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1331             GEN_PRIV(CTX);                              \
1332         }                                               \
1333     } while (0)
1334 #endif
1335 
1336 #define CHK_NONE(CTX)
1337 
1338 /*****************************************************************************/
1339 /* PowerPC instructions table                                                */
1340 
1341 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1342 {                                                                             \
1343     .opc1 = op1,                                                              \
1344     .opc2 = op2,                                                              \
1345     .opc3 = op3,                                                              \
1346     .opc4 = 0xff,                                                             \
1347     .handler = {                                                              \
1348         .inval1  = invl,                                                      \
1349         .type = _typ,                                                         \
1350         .type2 = _typ2,                                                       \
1351         .handler = &gen_##name,                                               \
1352     },                                                                        \
1353     .oname = stringify(name),                                                 \
1354 }
1355 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1356 {                                                                             \
1357     .opc1 = op1,                                                              \
1358     .opc2 = op2,                                                              \
1359     .opc3 = op3,                                                              \
1360     .opc4 = 0xff,                                                             \
1361     .handler = {                                                              \
1362         .inval1  = invl1,                                                     \
1363         .inval2  = invl2,                                                     \
1364         .type = _typ,                                                         \
1365         .type2 = _typ2,                                                       \
1366         .handler = &gen_##name,                                               \
1367     },                                                                        \
1368     .oname = stringify(name),                                                 \
1369 }
1370 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1371 {                                                                             \
1372     .opc1 = op1,                                                              \
1373     .opc2 = op2,                                                              \
1374     .opc3 = op3,                                                              \
1375     .opc4 = 0xff,                                                             \
1376     .handler = {                                                              \
1377         .inval1  = invl,                                                      \
1378         .type = _typ,                                                         \
1379         .type2 = _typ2,                                                       \
1380         .handler = &gen_##name,                                               \
1381     },                                                                        \
1382     .oname = onam,                                                            \
1383 }
1384 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1385 {                                                                             \
1386     .opc1 = op1,                                                              \
1387     .opc2 = op2,                                                              \
1388     .opc3 = op3,                                                              \
1389     .opc4 = op4,                                                              \
1390     .handler = {                                                              \
1391         .inval1  = invl,                                                      \
1392         .type = _typ,                                                         \
1393         .type2 = _typ2,                                                       \
1394         .handler = &gen_##name,                                               \
1395     },                                                                        \
1396     .oname = stringify(name),                                                 \
1397 }
1398 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1399 {                                                                             \
1400     .opc1 = op1,                                                              \
1401     .opc2 = op2,                                                              \
1402     .opc3 = op3,                                                              \
1403     .opc4 = op4,                                                              \
1404     .handler = {                                                              \
1405         .inval1  = invl,                                                      \
1406         .type = _typ,                                                         \
1407         .type2 = _typ2,                                                       \
1408         .handler = &gen_##name,                                               \
1409     },                                                                        \
1410     .oname = onam,                                                            \
1411 }
1412 
1413 /* Invalid instruction */
1414 static void gen_invalid(DisasContext *ctx)
1415 {
1416     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1417 }
1418 
1419 static opc_handler_t invalid_handler = {
1420     .inval1  = 0xFFFFFFFF,
1421     .inval2  = 0xFFFFFFFF,
1422     .type    = PPC_NONE,
1423     .type2   = PPC_NONE,
1424     .handler = gen_invalid,
1425 };
1426 
1427 /***                           Integer comparison                          ***/
1428 
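/*
 * Compare arg0 with arg1 (signed if s) and set CR field crf to LT/GT/EQ,
 * OR-ing in the current SO bit.
 */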
1429 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1430 {
1431     TCGv t0 = tcg_temp_new();
1432     TCGv t1 = tcg_temp_new();
1433     TCGv_i32 t = tcg_temp_new_i32();
1434 
1435     tcg_gen_movi_tl(t0, CRF_EQ);
1436     tcg_gen_movi_tl(t1, CRF_LT);
1437     tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1438                        t0, arg0, arg1, t1, t0);
1439     tcg_gen_movi_tl(t1, CRF_GT);
1440     tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1441                        t0, arg0, arg1, t1, t0);
1442 
1443     tcg_gen_trunc_tl_i32(t, t0);
1444     tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1445     tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1446 
1447     tcg_temp_free(t0);
1448     tcg_temp_free(t1);
1449     tcg_temp_free_i32(t);
1450 }
1451 
1452 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1453 {
1454     TCGv t0 = tcg_const_tl(arg1);
1455     gen_op_cmp(arg0, t0, s, crf);
1456     tcg_temp_free(t0);
1457 }
1458 
1459 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1460 {
1461     TCGv t0, t1;
1462     t0 = tcg_temp_new();
1463     t1 = tcg_temp_new();
1464     if (s) {
1465         tcg_gen_ext32s_tl(t0, arg0);
1466         tcg_gen_ext32s_tl(t1, arg1);
1467     } else {
1468         tcg_gen_ext32u_tl(t0, arg0);
1469         tcg_gen_ext32u_tl(t1, arg1);
1470     }
1471     gen_op_cmp(t0, t1, s, crf);
1472     tcg_temp_free(t1);
1473     tcg_temp_free(t0);
1474 }
1475 
1476 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1477 {
1478     TCGv t0 = tcg_const_tl(arg1);
1479     gen_op_cmp32(arg0, t0, s, crf);
1480     tcg_temp_free(t0);
1481 }
1482 
1483 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1484 {
1485     if (NARROW_MODE(ctx)) {
1486         gen_op_cmpi32(reg, 0, 1, 0);
1487     } else {
1488         gen_op_cmpi(reg, 0, 1, 0);
1489     }
1490 }
1491 
1492 /* cmprb - range comparison: isupper, isalpha, islower */
1493 static void gen_cmprb(DisasContext *ctx)
1494 {
1495     TCGv_i32 src1 = tcg_temp_new_i32();
1496     TCGv_i32 src2 = tcg_temp_new_i32();
1497     TCGv_i32 src2lo = tcg_temp_new_i32();
1498     TCGv_i32 src2hi = tcg_temp_new_i32();
1499     TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1500 
1501     tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1502     tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1503 
1504     tcg_gen_andi_i32(src1, src1, 0xFF);
1505     tcg_gen_ext8u_i32(src2lo, src2);
1506     tcg_gen_shri_i32(src2, src2, 8);
1507     tcg_gen_ext8u_i32(src2hi, src2);
1508 
1509     tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1510     tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1511     tcg_gen_and_i32(crf, src2lo, src2hi);
1512 
1513     if (ctx->opcode & 0x00200000) {
1514         tcg_gen_shri_i32(src2, src2, 8);
1515         tcg_gen_ext8u_i32(src2lo, src2);
1516         tcg_gen_shri_i32(src2, src2, 8);
1517         tcg_gen_ext8u_i32(src2hi, src2);
1518         tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1519         tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1520         tcg_gen_and_i32(src2lo, src2lo, src2hi);
1521         tcg_gen_or_i32(crf, crf, src2lo);
1522     }
1523     tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1524     tcg_temp_free_i32(src1);
1525     tcg_temp_free_i32(src2);
1526     tcg_temp_free_i32(src2lo);
1527     tcg_temp_free_i32(src2hi);
1528 }
1529 
1530 #if defined(TARGET_PPC64)
1531 /* cmpeqb */
1532 static void gen_cmpeqb(DisasContext *ctx)
1533 {
1534     gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1535                       cpu_gpr[rB(ctx->opcode)]);
1536 }
1537 #endif
1538 
1539 /* isel (PowerPC 2.03 specification) */
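/* rD = (CR bit bi set) ? (rA == 0 ? 0 : GPR[rA]) : GPR[rB] */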
1540 static void gen_isel(DisasContext *ctx)
1541 {
1542     uint32_t bi = rC(ctx->opcode);
1543     uint32_t mask = 0x08 >> (bi & 0x03);
1544     TCGv t0 = tcg_temp_new();
1545     TCGv zr;
1546 
1547     tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1548     tcg_gen_andi_tl(t0, t0, mask);
1549 
1550     zr = tcg_const_tl(0);
1551     tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1552                        rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1553                        cpu_gpr[rB(ctx->opcode)]);
1554     tcg_temp_free(zr);
1555     tcg_temp_free(t0);
1556 }
1557 
1558 /* cmpb: PowerPC 2.05 specification */
1559 static void gen_cmpb(DisasContext *ctx)
1560 {
1561     gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1562                     cpu_gpr[rB(ctx->opcode)]);
1563 }
1564 
1565 /***                           Integer arithmetic                          ***/
1566 
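/*
 * Compute XER.OV (and OV32 on ISA v3.00) from the sign bits of the result
 * (arg0) and operands (arg1, arg2): for an addition, overflow occurred iff
 * the operands have the same sign and the result's sign differs; 'sub'
 * flips the test for subtraction.
 */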
1567 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1568                                            TCGv arg1, TCGv arg2, int sub)
1569 {
1570     TCGv t0 = tcg_temp_new();
1571 
1572     tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1573     tcg_gen_xor_tl(t0, arg1, arg2);
1574     if (sub) {
1575         tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1576     } else {
1577         tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1578     }
1579     tcg_temp_free(t0);
1580     if (NARROW_MODE(ctx)) {
1581         tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1582         if (is_isa300(ctx)) {
1583             tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1584         }
1585     } else {
1586         if (is_isa300(ctx)) {
1587             tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1588         }
1589         tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1590     }
1591     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1592 }
1593 
1594 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1595                                              TCGv res, TCGv arg0, TCGv arg1,
1596                                              TCGv ca32, int sub)
1597 {
1598     TCGv t0;
1599 
1600     if (!is_isa300(ctx)) {
1601         return;
1602     }
1603 
1604     t0 = tcg_temp_new();
1605     if (sub) {
1606         tcg_gen_eqv_tl(t0, arg0, arg1);
1607     } else {
1608         tcg_gen_xor_tl(t0, arg0, arg1);
1609     }
1610     tcg_gen_xor_tl(t0, t0, res);
1611     tcg_gen_extract_tl(ca32, t0, 32, 1);
1612     tcg_temp_free(t0);
1613 }
1614 
1615 /* Common add function */
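/*
 * ret = arg1 + arg2 (+ ca when add_ca).  compute_ca and compute_ov select
 * whether the carry (ca, and ca32 on ISA v3.00) and overflow flags are
 * updated; compute_rc0 additionally sets CR0 from the result.
 */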
1616 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1617                                     TCGv arg2, TCGv ca, TCGv ca32,
1618                                     bool add_ca, bool compute_ca,
1619                                     bool compute_ov, bool compute_rc0)
1620 {
1621     TCGv t0 = ret;
1622 
1623     if (compute_ca || compute_ov) {
1624         t0 = tcg_temp_new();
1625     }
1626 
1627     if (compute_ca) {
1628         if (NARROW_MODE(ctx)) {
1629             /*
1630              * Caution: a non-obvious corner case of the spec is that
1631              * we must produce the *entire* 64-bit addition, while the
1632              * carry is the one propagating into bit 32.
1633              */
1634             TCGv t1 = tcg_temp_new();
1635             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1636             tcg_gen_add_tl(t0, arg1, arg2);
1637             if (add_ca) {
1638                 tcg_gen_add_tl(t0, t0, ca);
1639             }
1640             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1641             tcg_temp_free(t1);
1642             tcg_gen_extract_tl(ca, ca, 32, 1);
1643             if (is_isa300(ctx)) {
1644                 tcg_gen_mov_tl(ca32, ca);
1645             }
1646         } else {
1647             TCGv zero = tcg_const_tl(0);
1648             if (add_ca) {
1649                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1650                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1651             } else {
1652                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1653             }
1654             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1655             tcg_temp_free(zero);
1656         }
1657     } else {
1658         tcg_gen_add_tl(t0, arg1, arg2);
1659         if (add_ca) {
1660             tcg_gen_add_tl(t0, t0, ca);
1661         }
1662     }
1663 
1664     if (compute_ov) {
1665         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1666     }
1667     if (unlikely(compute_rc0)) {
1668         gen_set_Rc0(ctx, t0);
1669     }
1670 
1671     if (t0 != ret) {
1672         tcg_gen_mov_tl(ret, t0);
1673         tcg_temp_free(t0);
1674     }
1675 }
1676 /* Add functions with two operands */
1677 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
1678 static void glue(gen_, name)(DisasContext *ctx)                               \
1679 {                                                                             \
1680     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1681                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1682                      ca, glue(ca, 32),                                        \
1683                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1684 }
1685 /* Add functions with one operand and one immediate */
1686 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
1687                                 add_ca, compute_ca, compute_ov)               \
1688 static void glue(gen_, name)(DisasContext *ctx)                               \
1689 {                                                                             \
1690     TCGv t0 = tcg_const_tl(const_val);                                        \
1691     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1692                      cpu_gpr[rA(ctx->opcode)], t0,                            \
1693                      ca, glue(ca, 32),                                        \
1694                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1695     tcg_temp_free(t0);                                                        \
1696 }
1697 
1698 /* add  add.  addo  addo. */
1699 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1700 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1701 /* addc  addc.  addco  addco. */
1702 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1703 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1704 /* adde  adde.  addeo  addeo. */
1705 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1706 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1707 /* addme  addme.  addmeo  addmeo.  */
1708 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1709 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1710 /* addex */
1711 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1712 /* addze  addze.  addzeo  addzeo. */
1713 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1714 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1715 /* addic  addic.*/
1716 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1717 {
1718     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1719     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1720                      c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1721     tcg_temp_free(c);
1722 }
1723 
1724 static void gen_addic(DisasContext *ctx)
1725 {
1726     gen_op_addic(ctx, 0);
1727 }
1728 
1729 static void gen_addic_(DisasContext *ctx)
1730 {
1731     gen_op_addic(ctx, 1);
1732 }
1733 
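     /*
      * Divide helper for divw/divwu and friends.  t2 flags the cases the ISA
      * leaves undefined (division by zero, and INT_MIN / -1 for the signed
      * forms); in those cases the divisor is replaced by the flag value (1)
      * so the host division cannot trap, and t2 also supplies OV/OV32 when
      * compute_ov is set.
      */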
1734 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1735                                      TCGv arg2, int sign, int compute_ov)
1736 {
1737     TCGv_i32 t0 = tcg_temp_new_i32();
1738     TCGv_i32 t1 = tcg_temp_new_i32();
1739     TCGv_i32 t2 = tcg_temp_new_i32();
1740     TCGv_i32 t3 = tcg_temp_new_i32();
1741 
1742     tcg_gen_trunc_tl_i32(t0, arg1);
1743     tcg_gen_trunc_tl_i32(t1, arg2);
1744     if (sign) {
1745         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1746         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1747         tcg_gen_and_i32(t2, t2, t3);
1748         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1749         tcg_gen_or_i32(t2, t2, t3);
1750         tcg_gen_movi_i32(t3, 0);
1751         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1752         tcg_gen_div_i32(t3, t0, t1);
1753         tcg_gen_extu_i32_tl(ret, t3);
1754     } else {
1755         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1756         tcg_gen_movi_i32(t3, 0);
1757         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1758         tcg_gen_divu_i32(t3, t0, t1);
1759         tcg_gen_extu_i32_tl(ret, t3);
1760     }
1761     if (compute_ov) {
1762         tcg_gen_extu_i32_tl(cpu_ov, t2);
1763         if (is_isa300(ctx)) {
1764             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1765         }
1766         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1767     }
1768     tcg_temp_free_i32(t0);
1769     tcg_temp_free_i32(t1);
1770     tcg_temp_free_i32(t2);
1771     tcg_temp_free_i32(t3);
1772 
1773     if (unlikely(Rc(ctx->opcode) != 0)) {
1774         gen_set_Rc0(ctx, ret);
1775     }
1776 }
1777 /* Div functions */
1778 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
1779 static void glue(gen_, name)(DisasContext *ctx)                               \
1780 {                                                                             \
1781     gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1782                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1783                      sign, compute_ov);                                       \
1784 }
1785 /* divwu  divwu.  divwuo  divwuo.   */
1786 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1787 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1788 /* divw  divw.  divwo  divwo.   */
1789 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1790 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1791 
1792 /* div[wd]eu[o][.] */
1793 #define GEN_DIVE(name, hlpr, compute_ov)                                      \
1794 static void gen_##name(DisasContext *ctx)                                     \
1795 {                                                                             \
1796     TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
1797     gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
1798                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1799     tcg_temp_free_i32(t0);                                                    \
1800     if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
1801         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
1802     }                                                                         \
1803 }
1804 
1805 GEN_DIVE(divweu, divweu, 0);
1806 GEN_DIVE(divweuo, divweu, 1);
1807 GEN_DIVE(divwe, divwe, 0);
1808 GEN_DIVE(divweo, divwe, 1);
1809 
1810 #if defined(TARGET_PPC64)
1811 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1812                                      TCGv arg2, int sign, int compute_ov)
1813 {
1814     TCGv_i64 t0 = tcg_temp_new_i64();
1815     TCGv_i64 t1 = tcg_temp_new_i64();
1816     TCGv_i64 t2 = tcg_temp_new_i64();
1817     TCGv_i64 t3 = tcg_temp_new_i64();
1818 
1819     tcg_gen_mov_i64(t0, arg1);
1820     tcg_gen_mov_i64(t1, arg2);
1821     if (sign) {
1822         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1823         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1824         tcg_gen_and_i64(t2, t2, t3);
1825         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1826         tcg_gen_or_i64(t2, t2, t3);
1827         tcg_gen_movi_i64(t3, 0);
1828         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1829         tcg_gen_div_i64(ret, t0, t1);
1830     } else {
1831         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1832         tcg_gen_movi_i64(t3, 0);
1833         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1834         tcg_gen_divu_i64(ret, t0, t1);
1835     }
1836     if (compute_ov) {
1837         tcg_gen_mov_tl(cpu_ov, t2);
1838         if (is_isa300(ctx)) {
1839             tcg_gen_mov_tl(cpu_ov32, t2);
1840         }
1841         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1842     }
1843     tcg_temp_free_i64(t0);
1844     tcg_temp_free_i64(t1);
1845     tcg_temp_free_i64(t2);
1846     tcg_temp_free_i64(t3);
1847 
1848     if (unlikely(Rc(ctx->opcode) != 0)) {
1849         gen_set_Rc0(ctx, ret);
1850     }
1851 }
1852 
1853 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
1854 static void glue(gen_, name)(DisasContext *ctx)                               \
1855 {                                                                             \
1856     gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1857                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
1858                       sign, compute_ov);                                      \
1859 }
1860 /* divdu  divdu.  divduo  divduo.   */
1861 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1862 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1863 /* divd  divd.  divdo  divdo.   */
1864 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1865 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1866 
1867 GEN_DIVE(divdeu, divdeu, 0);
1868 GEN_DIVE(divdeuo, divdeu, 1);
1869 GEN_DIVE(divde, divde, 0);
1870 GEN_DIVE(divdeo, divde, 1);
1871 #endif
1872 
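     /*
      * Modulo helper for modsw/moduw.  As in the divide helpers above, the
      * divisor is forced to 1 for the cases the ISA leaves undefined (zero
      * divisor, or INT_MIN % -1 for the signed form), so the host remainder
      * cannot trap; the result in those cases is 0.
      */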
1873 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1874                                      TCGv arg2, int sign)
1875 {
1876     TCGv_i32 t0 = tcg_temp_new_i32();
1877     TCGv_i32 t1 = tcg_temp_new_i32();
1878 
1879     tcg_gen_trunc_tl_i32(t0, arg1);
1880     tcg_gen_trunc_tl_i32(t1, arg2);
1881     if (sign) {
1882         TCGv_i32 t2 = tcg_temp_new_i32();
1883         TCGv_i32 t3 = tcg_temp_new_i32();
1884         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1885         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1886         tcg_gen_and_i32(t2, t2, t3);
1887         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1888         tcg_gen_or_i32(t2, t2, t3);
1889         tcg_gen_movi_i32(t3, 0);
1890         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1891         tcg_gen_rem_i32(t3, t0, t1);
1892         tcg_gen_ext_i32_tl(ret, t3);
1893         tcg_temp_free_i32(t2);
1894         tcg_temp_free_i32(t3);
1895     } else {
1896         TCGv_i32 t2 = tcg_const_i32(1);
1897         TCGv_i32 t3 = tcg_const_i32(0);
1898         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1899         tcg_gen_remu_i32(t3, t0, t1);
1900         tcg_gen_extu_i32_tl(ret, t3);
1901         tcg_temp_free_i32(t2);
1902         tcg_temp_free_i32(t3);
1903     }
1904     tcg_temp_free_i32(t0);
1905     tcg_temp_free_i32(t1);
1906 }
1907 
1908 #define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
1909 static void glue(gen_, name)(DisasContext *ctx)                             \
1910 {                                                                           \
1911     gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1912                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1913                       sign);                                                \
1914 }
1915 
1916 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1917 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1918 
1919 #if defined(TARGET_PPC64)
1920 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1921                                      TCGv arg2, int sign)
1922 {
1923     TCGv_i64 t0 = tcg_temp_new_i64();
1924     TCGv_i64 t1 = tcg_temp_new_i64();
1925 
1926     tcg_gen_mov_i64(t0, arg1);
1927     tcg_gen_mov_i64(t1, arg2);
1928     if (sign) {
1929         TCGv_i64 t2 = tcg_temp_new_i64();
1930         TCGv_i64 t3 = tcg_temp_new_i64();
1931         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1932         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1933         tcg_gen_and_i64(t2, t2, t3);
1934         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1935         tcg_gen_or_i64(t2, t2, t3);
1936         tcg_gen_movi_i64(t3, 0);
1937         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1938         tcg_gen_rem_i64(ret, t0, t1);
1939         tcg_temp_free_i64(t2);
1940         tcg_temp_free_i64(t3);
1941     } else {
1942         TCGv_i64 t2 = tcg_const_i64(1);
1943         TCGv_i64 t3 = tcg_const_i64(0);
1944         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1945         tcg_gen_remu_i64(ret, t0, t1);
1946         tcg_temp_free_i64(t2);
1947         tcg_temp_free_i64(t3);
1948     }
1949     tcg_temp_free_i64(t0);
1950     tcg_temp_free_i64(t1);
1951 }
1952 
1953 #define GEN_INT_ARITH_MODD(name, opc3, sign)                                \
1954 static void glue(gen_, name)(DisasContext *ctx)                             \
1955 {                                                                           \
1956     gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1957                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1958                       sign);                                                \
1959 }
1960 
1961 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1962 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1963 #endif
1964 
1965 /* mulhw  mulhw. */
1966 static void gen_mulhw(DisasContext *ctx)
1967 {
1968     TCGv_i32 t0 = tcg_temp_new_i32();
1969     TCGv_i32 t1 = tcg_temp_new_i32();
1970 
1971     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1972     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1973     tcg_gen_muls2_i32(t0, t1, t0, t1);
1974     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1975     tcg_temp_free_i32(t0);
1976     tcg_temp_free_i32(t1);
1977     if (unlikely(Rc(ctx->opcode) != 0)) {
1978         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1979     }
1980 }
1981 
1982 /* mulhwu  mulhwu.  */
1983 static void gen_mulhwu(DisasContext *ctx)
1984 {
1985     TCGv_i32 t0 = tcg_temp_new_i32();
1986     TCGv_i32 t1 = tcg_temp_new_i32();
1987 
1988     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1989     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1990     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1991     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1992     tcg_temp_free_i32(t0);
1993     tcg_temp_free_i32(t1);
1994     if (unlikely(Rc(ctx->opcode) != 0)) {
1995         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1996     }
1997 }
1998 
1999 /* mullw  mullw. */
2000 static void gen_mullw(DisasContext *ctx)
2001 {
2002 #if defined(TARGET_PPC64)
2003     TCGv_i64 t0, t1;
2004     t0 = tcg_temp_new_i64();
2005     t1 = tcg_temp_new_i64();
2006     tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2007     tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2008     tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2009     tcg_temp_free(t0);
2010     tcg_temp_free(t1);
2011 #else
2012     tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2013                     cpu_gpr[rB(ctx->opcode)]);
2014 #endif
2015     if (unlikely(Rc(ctx->opcode) != 0)) {
2016         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2017     }
2018 }
2019 
2020 /* mullwo  mullwo. */
2021 static void gen_mullwo(DisasContext *ctx)
2022 {
2023     TCGv_i32 t0 = tcg_temp_new_i32();
2024     TCGv_i32 t1 = tcg_temp_new_i32();
2025 
2026     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2027     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2028     tcg_gen_muls2_i32(t0, t1, t0, t1);
2029 #if defined(TARGET_PPC64)
2030     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2031 #else
2032     tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2033 #endif
2034 
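         /*
          * OV is set when the high 32 bits of the product are not the sign
          * extension of the low 32 bits.
          */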
2035     tcg_gen_sari_i32(t0, t0, 31);
2036     tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2037     tcg_gen_extu_i32_tl(cpu_ov, t0);
2038     if (is_isa300(ctx)) {
2039         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2040     }
2041     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2042 
2043     tcg_temp_free_i32(t0);
2044     tcg_temp_free_i32(t1);
2045     if (unlikely(Rc(ctx->opcode) != 0)) {
2046         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2047     }
2048 }
2049 
2050 /* mulli */
2051 static void gen_mulli(DisasContext *ctx)
2052 {
2053     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2054                     SIMM(ctx->opcode));
2055 }
2056 
2057 #if defined(TARGET_PPC64)
2058 /* mulhd  mulhd. */
2059 static void gen_mulhd(DisasContext *ctx)
2060 {
2061     TCGv lo = tcg_temp_new();
2062     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2063                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2064     tcg_temp_free(lo);
2065     if (unlikely(Rc(ctx->opcode) != 0)) {
2066         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2067     }
2068 }
2069 
2070 /* mulhdu  mulhdu. */
2071 static void gen_mulhdu(DisasContext *ctx)
2072 {
2073     TCGv lo = tcg_temp_new();
2074     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2075                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2076     tcg_temp_free(lo);
2077     if (unlikely(Rc(ctx->opcode) != 0)) {
2078         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2079     }
2080 }
2081 
2082 /* mulld  mulld. */
2083 static void gen_mulld(DisasContext *ctx)
2084 {
2085     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2086                    cpu_gpr[rB(ctx->opcode)]);
2087     if (unlikely(Rc(ctx->opcode) != 0)) {
2088         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2089     }
2090 }
2091 
2092 /* mulldo  mulldo. */
2093 static void gen_mulldo(DisasContext *ctx)
2094 {
2095     TCGv_i64 t0 = tcg_temp_new_i64();
2096     TCGv_i64 t1 = tcg_temp_new_i64();
2097 
2098     tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2099                       cpu_gpr[rB(ctx->opcode)]);
2100     tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2101 
2102     tcg_gen_sari_i64(t0, t0, 63);
2103     tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2104     if (is_isa300(ctx)) {
2105         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2106     }
2107     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2108 
2109     tcg_temp_free_i64(t0);
2110     tcg_temp_free_i64(t1);
2111 
2112     if (unlikely(Rc(ctx->opcode) != 0)) {
2113         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2114     }
2115 }
2116 #endif
2117 
2118 /* Common subf function */
2119 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2120                                      TCGv arg2, bool add_ca, bool compute_ca,
2121                                      bool compute_ov, bool compute_rc0)
2122 {
2123     TCGv t0 = ret;
2124 
2125     if (compute_ca || compute_ov) {
2126         t0 = tcg_temp_new();
2127     }
2128 
2129     if (compute_ca) {
2130         /* dest = ~arg1 + arg2 [+ ca].  */
2131         if (NARROW_MODE(ctx)) {
2132             /*
2133              * Caution: a non-obvious corner case of the spec is that
2134              * we must produce the *entire* 64-bit addition, while the
2135              * carry (XER.CA) is the carry out of bit 31, i.e. into bit 32.
2136              */
2137             TCGv inv1 = tcg_temp_new();
2138             TCGv t1 = tcg_temp_new();
2139             tcg_gen_not_tl(inv1, arg1);
2140             if (add_ca) {
2141                 tcg_gen_add_tl(t0, arg2, cpu_ca);
2142             } else {
2143                 tcg_gen_addi_tl(t0, arg2, 1);
2144             }
2145             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
2146             tcg_gen_add_tl(t0, t0, inv1);
2147             tcg_temp_free(inv1);
2148             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
2149             tcg_temp_free(t1);
2150             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2151             if (is_isa300(ctx)) {
2152                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2153             }
2154         } else if (add_ca) {
2155             TCGv zero, inv1 = tcg_temp_new();
2156             tcg_gen_not_tl(inv1, arg1);
2157             zero = tcg_const_tl(0);
2158             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2159             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2160             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2161             tcg_temp_free(zero);
2162             tcg_temp_free(inv1);
2163         } else {
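                 /*
                  * With no carry-in, the carry of arg2 + ~arg1 + 1 is set
                  * exactly when arg2 >= arg1 unsigned, so a setcond suffices.
                  */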
2164             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2165             tcg_gen_sub_tl(t0, arg2, arg1);
2166             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2167         }
2168     } else if (add_ca) {
2169         /*
2170          * Since we're ignoring carry-out, and ~arg1 == -arg1 - 1 in two's
2171          * complement, ~arg1 + arg2 + ca simplifies to arg2 - arg1 + ca - 1.
2172          */
2173         tcg_gen_sub_tl(t0, arg2, arg1);
2174         tcg_gen_add_tl(t0, t0, cpu_ca);
2175         tcg_gen_subi_tl(t0, t0, 1);
2176     } else {
2177         tcg_gen_sub_tl(t0, arg2, arg1);
2178     }
2179 
2180     if (compute_ov) {
2181         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2182     }
2183     if (unlikely(compute_rc0)) {
2184         gen_set_Rc0(ctx, t0);
2185     }
2186 
2187     if (t0 != ret) {
2188         tcg_gen_mov_tl(ret, t0);
2189         tcg_temp_free(t0);
2190     }
2191 }
2192 /* Sub functions with two operands */
2193 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
2194 static void glue(gen_, name)(DisasContext *ctx)                               \
2195 {                                                                             \
2196     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2197                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
2198                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2199 }
2200 /* Sub functions with one operand and one immediate */
2201 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
2202                                 add_ca, compute_ca, compute_ov)               \
2203 static void glue(gen_, name)(DisasContext *ctx)                               \
2204 {                                                                             \
2205     TCGv t0 = tcg_const_tl(const_val);                                        \
2206     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2207                       cpu_gpr[rA(ctx->opcode)], t0,                           \
2208                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2209     tcg_temp_free(t0);                                                        \
2210 }
2211 /* subf  subf.  subfo  subfo. */
2212 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2213 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2214 /* subfc  subfc.  subfco  subfco. */
2215 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2216 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2217 /* subfe  subfe.  subfeo  subfeo. */
2218 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2219 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2220 /* subfme  subfme.  subfmeo  subfmeo.  */
2221 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2222 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2223 /* subfze  subfze.  subfzeo  subfzeo. */
2224 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2225 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2226 
2227 /* subfic */
2228 static void gen_subfic(DisasContext *ctx)
2229 {
2230     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2231     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2232                       c, 0, 1, 0, 0);
2233     tcg_temp_free(c);
2234 }
2235 
2236 /* neg neg. nego nego. */
2237 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2238 {
2239     TCGv zero = tcg_const_tl(0);
2240     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2241                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2242     tcg_temp_free(zero);
2243 }
2244 
2245 static void gen_neg(DisasContext *ctx)
2246 {
2247     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2248     if (unlikely(Rc(ctx->opcode))) {
2249         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2250     }
2251 }
2252 
2253 static void gen_nego(DisasContext *ctx)
2254 {
2255     gen_op_arith_neg(ctx, 1);
2256 }
2257 
2258 /***                            Integer logical                            ***/
2259 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2260 static void glue(gen_, name)(DisasContext *ctx)                               \
2261 {                                                                             \
2262     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2263        cpu_gpr[rB(ctx->opcode)]);                                             \
2264     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2265         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2266 }
2267 
2268 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2269 static void glue(gen_, name)(DisasContext *ctx)                               \
2270 {                                                                             \
2271     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2272     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2273         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2274 }
2275 
2276 /* and & and. */
2277 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2278 /* andc & andc. */
2279 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2280 
2281 /* andi. */
2282 static void gen_andi_(DisasContext *ctx)
2283 {
2284     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2285                     UIMM(ctx->opcode));
2286     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2287 }
2288 
2289 /* andis. */
2290 static void gen_andis_(DisasContext *ctx)
2291 {
2292     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2293                     UIMM(ctx->opcode) << 16);
2294     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2295 }
2296 
2297 /* cntlzw */
2298 static void gen_cntlzw(DisasContext *ctx)
2299 {
2300     TCGv_i32 t = tcg_temp_new_i32();
2301 
2302     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2303     tcg_gen_clzi_i32(t, t, 32);
2304     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2305     tcg_temp_free_i32(t);
2306 
2307     if (unlikely(Rc(ctx->opcode) != 0)) {
2308         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2309     }
2310 }
2311 
2312 /* cnttzw */
2313 static void gen_cnttzw(DisasContext *ctx)
2314 {
2315     TCGv_i32 t = tcg_temp_new_i32();
2316 
2317     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2318     tcg_gen_ctzi_i32(t, t, 32);
2319     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2320     tcg_temp_free_i32(t);
2321 
2322     if (unlikely(Rc(ctx->opcode) != 0)) {
2323         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2324     }
2325 }
2326 
2327 /* eqv & eqv. */
2328 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2329 /* extsb & extsb. */
2330 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2331 /* extsh & extsh. */
2332 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2333 /* nand & nand. */
2334 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2335 /* nor & nor. */
2336 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2337 
2338 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2339 static void gen_pause(DisasContext *ctx)
2340 {
2341     TCGv_i32 t0 = tcg_const_i32(0);
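         /*
          * cpu_env points at PowerPCCPU::env; stepping back by
          * offsetof(PowerPCCPU, env) reaches the containing CPU object, whose
          * first member is the CPUState, so halted can be addressed from it.
          */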
2342     tcg_gen_st_i32(t0, cpu_env,
2343                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2344     tcg_temp_free_i32(t0);
2345 
2346     /* Stop translation; this gives other CPUs a chance to run */
2347     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2348 }
2349 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2350 
2351 /* or & or. */
2352 static void gen_or(DisasContext *ctx)
2353 {
2354     int rs, ra, rb;
2355 
2356     rs = rS(ctx->opcode);
2357     ra = rA(ctx->opcode);
2358     rb = rB(ctx->opcode);
2359     /* Optimisation for the mr / mr. (or rA,rS,rS) case */
2360     if (rs != ra || rs != rb) {
2361         if (rs != rb) {
2362             tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2363         } else {
2364             tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2365         }
2366         if (unlikely(Rc(ctx->opcode) != 0)) {
2367             gen_set_Rc0(ctx, cpu_gpr[ra]);
2368         }
2369     } else if (unlikely(Rc(ctx->opcode) != 0)) {
2370         gen_set_Rc0(ctx, cpu_gpr[rs]);
2371 #if defined(TARGET_PPC64)
2372     } else if (rs != 0) { /* 0 is nop */
2373         int prio = 0;
2374 
2375         switch (rs) {
2376         case 1:
2377             /* Set process priority to low */
2378             prio = 2;
2379             break;
2380         case 6:
2381             /* Set process priority to medium-low */
2382             prio = 3;
2383             break;
2384         case 2:
2385             /* Set process priority to normal */
2386             prio = 4;
2387             break;
2388 #if !defined(CONFIG_USER_ONLY)
2389         case 31:
2390             if (!ctx->pr) {
2391                 /* Set process priority to very low */
2392                 prio = 1;
2393             }
2394             break;
2395         case 5:
2396             if (!ctx->pr) {
2397                 /* Set process priority to medium-high */
2398                 prio = 5;
2399             }
2400             break;
2401         case 3:
2402             if (!ctx->pr) {
2403                 /* Set process priority to high */
2404                 prio = 6;
2405             }
2406             break;
2407         case 7:
2408             if (ctx->hv && !ctx->pr) {
2409                 /* Set process priority to very high */
2410                 prio = 7;
2411             }
2412             break;
2413 #endif
2414         default:
2415             break;
2416         }
2417         if (prio) {
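                 /*
                  * Update the priority (PRI) field of SPR PPR: bits 52:50 of
                  * the register, i.e. PPR[11:13] in IBM bit numbering.
                  */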
2418             TCGv t0 = tcg_temp_new();
2419             gen_load_spr(t0, SPR_PPR);
2420             tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2421             tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2422             gen_store_spr(SPR_PPR, t0);
2423             tcg_temp_free(t0);
2424         }
2425 #if !defined(CONFIG_USER_ONLY)
2426         /*
2427          * Pause out of TCG; otherwise spin loops with smt_low eat too
2428          * much CPU and the kernel hangs.  This applies to all
2429          * encodings other than no-op, e.g., miso(rs=26), yield(27),
2430          * mdoio(29), mdoom(30), and all currently undefined.
2431          */
2432         gen_pause(ctx);
2433 #endif
2434 #endif
2435     }
2436 }
2437 /* orc & orc. */
2438 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2439 
2440 /* xor & xor. */
2441 static void gen_xor(DisasContext *ctx)
2442 {
2443     /* Optimisation for "set to zero" case */
2444     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2445         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2446                        cpu_gpr[rB(ctx->opcode)]);
2447     } else {
2448         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2449     }
2450     if (unlikely(Rc(ctx->opcode) != 0)) {
2451         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2452     }
2453 }
2454 
2455 /* ori */
2456 static void gen_ori(DisasContext *ctx)
2457 {
2458     target_ulong uimm = UIMM(ctx->opcode);
2459 
2460     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2461         return;
2462     }
2463     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2464 }
2465 
2466 /* oris */
2467 static void gen_oris(DisasContext *ctx)
2468 {
2469     target_ulong uimm = UIMM(ctx->opcode);
2470 
2471     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2472         /* NOP */
2473         return;
2474     }
2475     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2476                    uimm << 16);
2477 }
2478 
2479 /* xori */
2480 static void gen_xori(DisasContext *ctx)
2481 {
2482     target_ulong uimm = UIMM(ctx->opcode);
2483 
2484     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2485         /* NOP */
2486         return;
2487     }
2488     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2489 }
2490 
2491 /* xoris */
2492 static void gen_xoris(DisasContext *ctx)
2493 {
2494     target_ulong uimm = UIMM(ctx->opcode);
2495 
2496     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2497         /* NOP */
2498         return;
2499     }
2500     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2501                     uimm << 16);
2502 }
2503 
2504 /* popcntb : PowerPC 2.03 specification */
2505 static void gen_popcntb(DisasContext *ctx)
2506 {
2507     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2508 }
2509 
2510 static void gen_popcntw(DisasContext *ctx)
2511 {
2512 #if defined(TARGET_PPC64)
2513     gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2514 #else
2515     tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2516 #endif
2517 }
2518 
2519 #if defined(TARGET_PPC64)
2520 /* popcntd: PowerPC 2.06 specification */
2521 static void gen_popcntd(DisasContext *ctx)
2522 {
2523     tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2524 }
2525 #endif
2526 
2527 /* prtyw: PowerPC 2.05 specification */
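     /*
      * Each 32-bit word of rS is xor-folded by 16 and then by 8 bits, so bit
      * 0 of each word ends up as the xor of the least-significant bits of
      * that word's four bytes; the final mask keeps only those two bits.
      */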
2528 static void gen_prtyw(DisasContext *ctx)
2529 {
2530     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2531     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2532     TCGv t0 = tcg_temp_new();
2533     tcg_gen_shri_tl(t0, rs, 16);
2534     tcg_gen_xor_tl(ra, rs, t0);
2535     tcg_gen_shri_tl(t0, ra, 8);
2536     tcg_gen_xor_tl(ra, ra, t0);
2537     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2538     tcg_temp_free(t0);
2539 }
2540 
2541 #if defined(TARGET_PPC64)
2542 /* prtyd: PowerPC 2.05 specification */
2543 static void gen_prtyd(DisasContext *ctx)
2544 {
2545     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2546     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2547     TCGv t0 = tcg_temp_new();
2548     tcg_gen_shri_tl(t0, rs, 32);
2549     tcg_gen_xor_tl(ra, rs, t0);
2550     tcg_gen_shri_tl(t0, ra, 16);
2551     tcg_gen_xor_tl(ra, ra, t0);
2552     tcg_gen_shri_tl(t0, ra, 8);
2553     tcg_gen_xor_tl(ra, ra, t0);
2554     tcg_gen_andi_tl(ra, ra, 1);
2555     tcg_temp_free(t0);
2556 }
2557 #endif
2558 
2559 #if defined(TARGET_PPC64)
2560 /* bpermd */
2561 static void gen_bpermd(DisasContext *ctx)
2562 {
2563     gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2564                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2565 }
2566 #endif
2567 
2568 #if defined(TARGET_PPC64)
2569 /* extsw & extsw. */
2570 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2571 
2572 /* cntlzd */
2573 static void gen_cntlzd(DisasContext *ctx)
2574 {
2575     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2576     if (unlikely(Rc(ctx->opcode) != 0)) {
2577         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2578     }
2579 }
2580 
2581 /* cnttzd */
2582 static void gen_cnttzd(DisasContext *ctx)
2583 {
2584     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2585     if (unlikely(Rc(ctx->opcode) != 0)) {
2586         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2587     }
2588 }
2589 
2590 /* darn */
2591 static void gen_darn(DisasContext *ctx)
2592 {
2593     int l = L(ctx->opcode);
2594 
2595     if (l > 2) {
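             /*
              * Reserved L values: return all ones, matching darn's error
              * indication.
              */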
2596         tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2597     } else {
2598         gen_icount_io_start(ctx);
2599         if (l == 0) {
2600             gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2601         } else {
2602             /* Return a 64-bit random number for both CRN (L=1) and RRN (L=2) */
2603             gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2604         }
2605     }
2606 }
2607 #endif
2608 
2609 /***                             Integer rotate                            ***/
2610 
2611 /* rlwimi & rlwimi. */
2612 static void gen_rlwimi(DisasContext *ctx)
2613 {
2614     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2615     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2616     uint32_t sh = SH(ctx->opcode);
2617     uint32_t mb = MB(ctx->opcode);
2618     uint32_t me = ME(ctx->opcode);
2619 
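         /*
          * When sh == 31 - me and the mask is contiguous (mb <= me), the
          * rotated field lands exactly under the mask, so rlwimi reduces to a
          * bit-field deposit of length me - mb + 1 at position sh.
          */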
2620     if (sh == (31 - me) && mb <= me) {
2621         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2622     } else {
2623         target_ulong mask;
2624         bool mask_in_32b = true;
2625         TCGv t1;
2626 
2627 #if defined(TARGET_PPC64)
2628         mb += 32;
2629         me += 32;
2630 #endif
2631         mask = MASK(mb, me);
2632 
2633 #if defined(TARGET_PPC64)
2634         if (mask > 0xffffffffu) {
2635             mask_in_32b = false;
2636         }
2637 #endif
2638         t1 = tcg_temp_new();
2639         if (mask_in_32b) {
2640             TCGv_i32 t0 = tcg_temp_new_i32();
2641             tcg_gen_trunc_tl_i32(t0, t_rs);
2642             tcg_gen_rotli_i32(t0, t0, sh);
2643             tcg_gen_extu_i32_tl(t1, t0);
2644             tcg_temp_free_i32(t0);
2645         } else {
2646 #if defined(TARGET_PPC64)
2647             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2648             tcg_gen_rotli_i64(t1, t1, sh);
2649 #else
2650             g_assert_not_reached();
2651 #endif
2652         }
2653 
2654         tcg_gen_andi_tl(t1, t1, mask);
2655         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2656         tcg_gen_or_tl(t_ra, t_ra, t1);
2657         tcg_temp_free(t1);
2658     }
2659     if (unlikely(Rc(ctx->opcode) != 0)) {
2660         gen_set_Rc0(ctx, t_ra);
2661     }
2662 }
2663 
2664 /* rlwinm & rlwinm. */
2665 static void gen_rlwinm(DisasContext *ctx)
2666 {
2667     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2668     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2669     int sh = SH(ctx->opcode);
2670     int mb = MB(ctx->opcode);
2671     int me = ME(ctx->opcode);
2672     int len = me - mb + 1;
2673     int rsh = (32 - sh) & 31;
2674 
2675     if (sh != 0 && len > 0 && me == (31 - sh)) {
2676         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2677     } else if (me == 31 && rsh + len <= 32) {
2678         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2679     } else {
2680         target_ulong mask;
2681         bool mask_in_32b = true;
2682 #if defined(TARGET_PPC64)
2683         mb += 32;
2684         me += 32;
2685 #endif
2686         mask = MASK(mb, me);
2687 #if defined(TARGET_PPC64)
2688         if (mask > 0xffffffffu) {
2689             mask_in_32b = false;
2690         }
2691 #endif
2692         if (mask_in_32b) {
2693             if (sh == 0) {
2694                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2695             } else {
2696                 TCGv_i32 t0 = tcg_temp_new_i32();
2697                 tcg_gen_trunc_tl_i32(t0, t_rs);
2698                 tcg_gen_rotli_i32(t0, t0, sh);
2699                 tcg_gen_andi_i32(t0, t0, mask);
2700                 tcg_gen_extu_i32_tl(t_ra, t0);
2701                 tcg_temp_free_i32(t0);
2702             }
2703         } else {
2704 #if defined(TARGET_PPC64)
2705             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2706             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2707             tcg_gen_andi_i64(t_ra, t_ra, mask);
2708 #else
2709             g_assert_not_reached();
2710 #endif
2711         }
2712     }
2713     if (unlikely(Rc(ctx->opcode) != 0)) {
2714         gen_set_Rc0(ctx, t_ra);
2715     }
2716 }
2717 
2718 /* rlwnm & rlwnm. */
2719 static void gen_rlwnm(DisasContext *ctx)
2720 {
2721     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2722     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2723     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2724     uint32_t mb = MB(ctx->opcode);
2725     uint32_t me = ME(ctx->opcode);
2726     target_ulong mask;
2727     bool mask_in_32b = true;
2728 
2729 #if defined(TARGET_PPC64)
2730     mb += 32;
2731     me += 32;
2732 #endif
2733     mask = MASK(mb, me);
2734 
2735 #if defined(TARGET_PPC64)
2736     if (mask > 0xffffffffu) {
2737         mask_in_32b = false;
2738     }
2739 #endif
2740     if (mask_in_32b) {
2741         TCGv_i32 t0 = tcg_temp_new_i32();
2742         TCGv_i32 t1 = tcg_temp_new_i32();
2743         tcg_gen_trunc_tl_i32(t0, t_rb);
2744         tcg_gen_trunc_tl_i32(t1, t_rs);
2745         tcg_gen_andi_i32(t0, t0, 0x1f);
2746         tcg_gen_rotl_i32(t1, t1, t0);
2747         tcg_gen_extu_i32_tl(t_ra, t1);
2748         tcg_temp_free_i32(t0);
2749         tcg_temp_free_i32(t1);
2750     } else {
2751 #if defined(TARGET_PPC64)
2752         TCGv_i64 t0 = tcg_temp_new_i64();
2753         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2754         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2755         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2756         tcg_temp_free_i64(t0);
2757 #else
2758         g_assert_not_reached();
2759 #endif
2760     }
2761 
2762     tcg_gen_andi_tl(t_ra, t_ra, mask);
2763 
2764     if (unlikely(Rc(ctx->opcode) != 0)) {
2765         gen_set_Rc0(ctx, t_ra);
2766     }
2767 }
2768 
2769 #if defined(TARGET_PPC64)
2770 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2771 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2772 {                                                                             \
2773     gen_##name(ctx, 0);                                                       \
2774 }                                                                             \
2775                                                                               \
2776 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2777 {                                                                             \
2778     gen_##name(ctx, 1);                                                       \
2779 }
2780 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2781 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2782 {                                                                             \
2783     gen_##name(ctx, 0, 0);                                                    \
2784 }                                                                             \
2785                                                                               \
2786 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2787 {                                                                             \
2788     gen_##name(ctx, 0, 1);                                                    \
2789 }                                                                             \
2790                                                                               \
2791 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2792 {                                                                             \
2793     gen_##name(ctx, 1, 0);                                                    \
2794 }                                                                             \
2795                                                                               \
2796 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2797 {                                                                             \
2798     gen_##name(ctx, 1, 1);                                                    \
2799 }
2800 
2801 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2802 {
2803     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2804     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2805     int len = me - mb + 1;
2806     int rsh = (64 - sh) & 63;
2807 
2808     if (sh != 0 && len > 0 && me == (63 - sh)) {
2809         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2810     } else if (me == 63 && rsh + len <= 64) {
2811         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2812     } else {
2813         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2814         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2815     }
2816     if (unlikely(Rc(ctx->opcode) != 0)) {
2817         gen_set_Rc0(ctx, t_ra);
2818     }
2819 }
2820 
2821 /* rldicl - rldicl. */
2822 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2823 {
2824     uint32_t sh, mb;
2825 
2826     sh = SH(ctx->opcode) | (shn << 5);
2827     mb = MB(ctx->opcode) | (mbn << 5);
2828     gen_rldinm(ctx, mb, 63, sh);
2829 }
2830 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2831 
2832 /* rldicr - rldicr. */
2833 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2834 {
2835     uint32_t sh, me;
2836 
2837     sh = SH(ctx->opcode) | (shn << 5);
2838     me = MB(ctx->opcode) | (men << 5);
2839     gen_rldinm(ctx, 0, me, sh);
2840 }
2841 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2842 
2843 /* rldic - rldic. */
2844 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2845 {
2846     uint32_t sh, mb;
2847 
2848     sh = SH(ctx->opcode) | (shn << 5);
2849     mb = MB(ctx->opcode) | (mbn << 5);
2850     gen_rldinm(ctx, mb, 63 - sh, sh);
2851 }
2852 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2853 
2854 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2855 {
2856     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2857     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2858     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2859     TCGv t0;
2860 
2861     t0 = tcg_temp_new();
2862     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2863     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2864     tcg_temp_free(t0);
2865 
2866     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2867     if (unlikely(Rc(ctx->opcode) != 0)) {
2868         gen_set_Rc0(ctx, t_ra);
2869     }
2870 }
2871 
2872 /* rldcl - rldcl. */
2873 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2874 {
2875     uint32_t mb;
2876 
2877     mb = MB(ctx->opcode) | (mbn << 5);
2878     gen_rldnm(ctx, mb, 63);
2879 }
2880 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2881 
2882 /* rldcr - rldcr. */
2883 static inline void gen_rldcr(DisasContext *ctx, int men)
2884 {
2885     uint32_t me;
2886 
2887     me = MB(ctx->opcode) | (men << 5);
2888     gen_rldnm(ctx, 0, me);
2889 }
2890 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2891 
2892 /* rldimi - rldimi. */
2893 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2894 {
2895     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2896     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2897     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2898     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2899     uint32_t me = 63 - sh;
2900 
2901     if (mb <= me) {
2902         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2903     } else {
2904         target_ulong mask = MASK(mb, me);
2905         TCGv t1 = tcg_temp_new();
2906 
2907         tcg_gen_rotli_tl(t1, t_rs, sh);
2908         tcg_gen_andi_tl(t1, t1, mask);
2909         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2910         tcg_gen_or_tl(t_ra, t_ra, t1);
2911         tcg_temp_free(t1);
2912     }
2913     if (unlikely(Rc(ctx->opcode) != 0)) {
2914         gen_set_Rc0(ctx, t_ra);
2915     }
2916 }
2917 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2918 #endif
2919 
2920 /***                             Integer shift                             ***/
2921 
2922 /* slw & slw. */
2923 static void gen_slw(DisasContext *ctx)
2924 {
2925     TCGv t0, t1;
2926 
2927     t0 = tcg_temp_new();
2928     /* AND rS with a mask that is 0 when rB >= 0x20 */
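         /*
          * Bit 5 of rB is shifted up to the sign bit and smeared back down,
          * giving all ones when rB & 0x20, so the andc below forces the
          * source to zero.  srw, sld and srd use the same trick.
          */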
2929 #if defined(TARGET_PPC64)
2930     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2931     tcg_gen_sari_tl(t0, t0, 0x3f);
2932 #else
2933     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2934     tcg_gen_sari_tl(t0, t0, 0x1f);
2935 #endif
2936     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2937     t1 = tcg_temp_new();
2938     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2939     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2940     tcg_temp_free(t1);
2941     tcg_temp_free(t0);
2942     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2943     if (unlikely(Rc(ctx->opcode) != 0)) {
2944         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2945     }
2946 }
2947 
2948 /* sraw & sraw. */
2949 static void gen_sraw(DisasContext *ctx)
2950 {
2951     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2952                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2953     if (unlikely(Rc(ctx->opcode) != 0)) {
2954         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2955     }
2956 }
2957 
2958 /* srawi & srawi. */
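     /*
      * XER.CA is set when the source is negative and any 1 bits are shifted
      * out, i.e. when the arithmetic right shift is inexact for a negative
      * value.
      */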
2959 static void gen_srawi(DisasContext *ctx)
2960 {
2961     int sh = SH(ctx->opcode);
2962     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2963     TCGv src = cpu_gpr[rS(ctx->opcode)];
2964     if (sh == 0) {
2965         tcg_gen_ext32s_tl(dst, src);
2966         tcg_gen_movi_tl(cpu_ca, 0);
2967         if (is_isa300(ctx)) {
2968             tcg_gen_movi_tl(cpu_ca32, 0);
2969         }
2970     } else {
2971         TCGv t0;
2972         tcg_gen_ext32s_tl(dst, src);
2973         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2974         t0 = tcg_temp_new();
2975         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2976         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2977         tcg_temp_free(t0);
2978         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2979         if (is_isa300(ctx)) {
2980             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2981         }
2982         tcg_gen_sari_tl(dst, dst, sh);
2983     }
2984     if (unlikely(Rc(ctx->opcode) != 0)) {
2985         gen_set_Rc0(ctx, dst);
2986     }
2987 }
2988 
2989 /* srw & srw. */
2990 static void gen_srw(DisasContext *ctx)
2991 {
2992     TCGv t0, t1;
2993 
2994     t0 = tcg_temp_new();
2995     /* AND rS with a mask that is 0 when rB >= 0x20 */
2996 #if defined(TARGET_PPC64)
2997     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2998     tcg_gen_sari_tl(t0, t0, 0x3f);
2999 #else
3000     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3001     tcg_gen_sari_tl(t0, t0, 0x1f);
3002 #endif
3003     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3004     tcg_gen_ext32u_tl(t0, t0);
3005     t1 = tcg_temp_new();
3006     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3007     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3008     tcg_temp_free(t1);
3009     tcg_temp_free(t0);
3010     if (unlikely(Rc(ctx->opcode) != 0)) {
3011         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3012     }
3013 }
3014 
3015 #if defined(TARGET_PPC64)
3016 /* sld & sld. */
3017 static void gen_sld(DisasContext *ctx)
3018 {
3019     TCGv t0, t1;
3020 
3021     t0 = tcg_temp_new();
3022     /* AND rS with a mask that is 0 when rB >= 0x40 */
3023     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3024     tcg_gen_sari_tl(t0, t0, 0x3f);
3025     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3026     t1 = tcg_temp_new();
3027     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3028     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3029     tcg_temp_free(t1);
3030     tcg_temp_free(t0);
3031     if (unlikely(Rc(ctx->opcode) != 0)) {
3032         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3033     }
3034 }
3035 
3036 /* srad & srad. */
3037 static void gen_srad(DisasContext *ctx)
3038 {
3039     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3040                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3041     if (unlikely(Rc(ctx->opcode) != 0)) {
3042         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3043     }
3044 }
3045 /* sradi & sradi. */
3046 static inline void gen_sradi(DisasContext *ctx, int n)
3047 {
3048     int sh = SH(ctx->opcode) + (n << 5);
3049     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3050     TCGv src = cpu_gpr[rS(ctx->opcode)];
3051     if (sh == 0) {
3052         tcg_gen_mov_tl(dst, src);
3053         tcg_gen_movi_tl(cpu_ca, 0);
3054         if (is_isa300(ctx)) {
3055             tcg_gen_movi_tl(cpu_ca32, 0);
3056         }
3057     } else {
3058         TCGv t0;
3059         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3060         t0 = tcg_temp_new();
3061         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3062         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3063         tcg_temp_free(t0);
3064         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3065         if (is_isa300(ctx)) {
3066             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3067         }
3068         tcg_gen_sari_tl(dst, src, sh);
3069     }
3070     if (unlikely(Rc(ctx->opcode) != 0)) {
3071         gen_set_Rc0(ctx, dst);
3072     }
3073 }
3074 
3075 static void gen_sradi0(DisasContext *ctx)
3076 {
3077     gen_sradi(ctx, 0);
3078 }
3079 
3080 static void gen_sradi1(DisasContext *ctx)
3081 {
3082     gen_sradi(ctx, 1);
3083 }
3084 
3085 /* extswsli & extswsli. */
3086 static inline void gen_extswsli(DisasContext *ctx, int n)
3087 {
3088     int sh = SH(ctx->opcode) + (n << 5);
3089     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3090     TCGv src = cpu_gpr[rS(ctx->opcode)];
3091 
3092     tcg_gen_ext32s_tl(dst, src);
3093     tcg_gen_shli_tl(dst, dst, sh);
3094     if (unlikely(Rc(ctx->opcode) != 0)) {
3095         gen_set_Rc0(ctx, dst);
3096     }
3097 }
3098 
3099 static void gen_extswsli0(DisasContext *ctx)
3100 {
3101     gen_extswsli(ctx, 0);
3102 }
3103 
3104 static void gen_extswsli1(DisasContext *ctx)
3105 {
3106     gen_extswsli(ctx, 1);
3107 }
3108 
3109 /* srd & srd. */
3110 static void gen_srd(DisasContext *ctx)
3111 {
3112     TCGv t0, t1;
3113 
3114     t0 = tcg_temp_new();
3115     /* AND rS with a mask that is 0 when rB >= 0x40 */
3116     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3117     tcg_gen_sari_tl(t0, t0, 0x3f);
3118     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3119     t1 = tcg_temp_new();
3120     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3121     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3122     tcg_temp_free(t1);
3123     tcg_temp_free(t0);
3124     if (unlikely(Rc(ctx->opcode) != 0)) {
3125         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3126     }
3127 }
3128 #endif
3129 
3130 /***                           Addressing modes                            ***/
3131 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
3132 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3133                                       target_long maskl)
3134 {
3135     target_long simm = SIMM(ctx->opcode);
3136 
3137     simm &= ~maskl;
3138     if (rA(ctx->opcode) == 0) {
3139         if (NARROW_MODE(ctx)) {
3140             simm = (uint32_t)simm;
3141         }
3142         tcg_gen_movi_tl(EA, simm);
3143     } else if (likely(simm != 0)) {
3144         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3145         if (NARROW_MODE(ctx)) {
3146             tcg_gen_ext32u_tl(EA, EA);
3147         }
3148     } else {
3149         if (NARROW_MODE(ctx)) {
3150             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3151         } else {
3152             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3153         }
3154     }
3155 }
3156 
3157 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3158 {
3159     if (rA(ctx->opcode) == 0) {
3160         if (NARROW_MODE(ctx)) {
3161             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3162         } else {
3163             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3164         }
3165     } else {
3166         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3167         if (NARROW_MODE(ctx)) {
3168             tcg_gen_ext32u_tl(EA, EA);
3169         }
3170     }
3171 }
3172 
3173 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3174 {
3175     if (rA(ctx->opcode) == 0) {
3176         tcg_gen_movi_tl(EA, 0);
3177     } else if (NARROW_MODE(ctx)) {
3178         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3179     } else {
3180         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3181     }
3182 }
3183 
3184 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3185                                 target_long val)
3186 {
3187     tcg_gen_addi_tl(ret, arg1, val);
3188     if (NARROW_MODE(ctx)) {
3189         tcg_gen_ext32u_tl(ret, ret);
3190     }
3191 }
3192 
3193 static inline void gen_align_no_le(DisasContext *ctx)
3194 {
3195     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3196                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3197 }
3198 
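/*
 * Compute an effective address into a new temporary:
 * EA = (rA|0) + displ, truncated to 32 bits in narrow mode.
 * The caller is responsible for freeing the returned temporary.
 */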
3199 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3200 {
3201     TCGv ea = tcg_temp_new();
3202     if (ra) {
3203         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3204     } else {
3205         tcg_gen_mov_tl(ea, displ);
3206     }
3207     if (NARROW_MODE(ctx)) {
3208         tcg_gen_ext32u_tl(ea, ea);
3209     }
3210     return ea;
3211 }
3212 
3213 /***                             Integer load                              ***/
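/*
 * DEF_MEMOP() applies the current default byte order (from
 * default_tcg_memop_mask), while BSWAP_MEMOP() yields the opposite byte
 * order, as used by the byte-reverse loads and stores (l*brx / st*brx).
 */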
3214 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3215 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
3216 
3217 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
3218 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
3219                                   TCGv val,                             \
3220                                   TCGv addr)                            \
3221 {                                                                       \
3222     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
3223 }
3224 
3225 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
3226 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3227 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3228 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3229 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3230 
3231 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3232 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3233 
3234 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
3235 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
3236                                              TCGv_i64 val,          \
3237                                              TCGv addr)             \
3238 {                                                                   \
3239     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
3240 }
3241 
3242 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
3243 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3244 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3245 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3246 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
3247 
3248 #if defined(TARGET_PPC64)
3249 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
3250 #endif
3251 
3252 #define GEN_QEMU_STORE_TL(stop, op)                                     \
3253 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
3254                                   TCGv val,                             \
3255                                   TCGv addr)                            \
3256 {                                                                       \
3257     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
3258 }
3259 
3260 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
3261 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
3262 #endif
3263 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3264 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3265 
3266 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3267 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3268 
3269 #define GEN_QEMU_STORE_64(stop, op)                               \
3270 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
3271                                               TCGv_i64 val,       \
3272                                               TCGv addr)          \
3273 {                                                                 \
3274     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
3275 }
3276 
3277 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
3278 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3279 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3280 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
3281 
3282 #if defined(TARGET_PPC64)
3283 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
3284 #endif
3285 
3286 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
3287 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3288 {                                                                             \
3289     TCGv EA;                                                                  \
3290     chk(ctx);                                                                 \
3291     gen_set_access_type(ctx, ACCESS_INT);                                     \
3292     EA = tcg_temp_new();                                                      \
3293     gen_addr_reg_index(ctx, EA);                                              \
3294     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
3295     tcg_temp_free(EA);                                                        \
3296 }
3297 
3298 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
3299     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3300 
3301 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
3302     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3303 
3304 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
3305 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3306 {                                                                             \
3307     TCGv EA;                                                                  \
3308     CHK_SV(ctx);                                                              \
3309     gen_set_access_type(ctx, ACCESS_INT);                                     \
3310     EA = tcg_temp_new();                                                      \
3311     gen_addr_reg_index(ctx, EA);                                              \
3312     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3313     tcg_temp_free(EA);                                                        \
3314 }
3315 
3316 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3317 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3318 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3319 #if defined(TARGET_PPC64)
3320 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
3321 #endif
3322 
3323 #if defined(TARGET_PPC64)
3324 /* CI load/store variants */
3325 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3326 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3327 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3328 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3329 #endif
3330 
3331 /***                              Integer store                            ***/
3332 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3333 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3334 {                                                                             \
3335     TCGv EA;                                                                  \
3336     chk(ctx);                                                                 \
3337     gen_set_access_type(ctx, ACCESS_INT);                                     \
3338     EA = tcg_temp_new();                                                      \
3339     gen_addr_reg_index(ctx, EA);                                              \
3340     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3341     tcg_temp_free(EA);                                                        \
3342 }
3343 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3344     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3345 
3346 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3347     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3348 
3349 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3350 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3351 {                                                                             \
3352     TCGv EA;                                                                  \
3353     CHK_SV(ctx);                                                              \
3354     gen_set_access_type(ctx, ACCESS_INT);                                     \
3355     EA = tcg_temp_new();                                                      \
3356     gen_addr_reg_index(ctx, EA);                                              \
3357     tcg_gen_qemu_st_tl(                                                       \
3358         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3359     tcg_temp_free(EA);                                                        \
3360 }
3361 
3362 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3363 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3364 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3365 #if defined(TARGET_PPC64)
3366 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3367 #endif
3368 
3369 #if defined(TARGET_PPC64)
3370 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3371 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3372 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3373 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3374 #endif
3375 /***                Integer load and store with byte reverse               ***/
3376 
3377 /* lhbrx */
3378 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3379 
3380 /* lwbrx */
3381 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3382 
3383 #if defined(TARGET_PPC64)
3384 /* ldbrx */
3385 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3386 /* stdbrx */
3387 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3388 #endif  /* TARGET_PPC64 */
3389 
3390 /* sthbrx */
3391 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3392 /* stwbrx */
3393 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3394 
3395 /***                    Integer load and store multiple                    ***/
3396 
3397 /* lmw */
3398 static void gen_lmw(DisasContext *ctx)
3399 {
3400     TCGv t0;
3401     TCGv_i32 t1;
3402 
3403     if (ctx->le_mode) {
3404         gen_align_no_le(ctx);
3405         return;
3406     }
3407     gen_set_access_type(ctx, ACCESS_INT);
3408     t0 = tcg_temp_new();
3409     t1 = tcg_const_i32(rD(ctx->opcode));
3410     gen_addr_imm_index(ctx, t0, 0);
3411     gen_helper_lmw(cpu_env, t0, t1);
3412     tcg_temp_free(t0);
3413     tcg_temp_free_i32(t1);
3414 }
3415 
3416 /* stmw */
3417 static void gen_stmw(DisasContext *ctx)
3418 {
3419     TCGv t0;
3420     TCGv_i32 t1;
3421 
3422     if (ctx->le_mode) {
3423         gen_align_no_le(ctx);
3424         return;
3425     }
3426     gen_set_access_type(ctx, ACCESS_INT);
3427     t0 = tcg_temp_new();
3428     t1 = tcg_const_i32(rS(ctx->opcode));
3429     gen_addr_imm_index(ctx, t0, 0);
3430     gen_helper_stmw(cpu_env, t0, t1);
3431     tcg_temp_free(t0);
3432     tcg_temp_free_i32(t1);
3433 }
3434 
3435 /***                    Integer load and store strings                     ***/
3436 
3437 /* lswi */
3438 /*
3439  * The PowerPC32 specification says we must generate an exception if rA
3440  * is in the range of registers to be loaded.  On the other hand, IBM
3441  * says this is valid, but rA won't be loaded.  For now, I'll follow the
3442  * spec...
3443  */
3444 static void gen_lswi(DisasContext *ctx)
3445 {
3446     TCGv t0;
3447     TCGv_i32 t1, t2;
3448     int nb = NB(ctx->opcode);
3449     int start = rD(ctx->opcode);
3450     int ra = rA(ctx->opcode);
3451     int nr;
3452 
3453     if (ctx->le_mode) {
3454         gen_align_no_le(ctx);
3455         return;
3456     }
3457     if (nb == 0) {
3458         nb = 32;
3459     }
3460     nr = DIV_ROUND_UP(nb, 4);
3461     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3462         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3463         return;
3464     }
3465     gen_set_access_type(ctx, ACCESS_INT);
3466     t0 = tcg_temp_new();
3467     gen_addr_register(ctx, t0);
3468     t1 = tcg_const_i32(nb);
3469     t2 = tcg_const_i32(start);
3470     gen_helper_lsw(cpu_env, t0, t1, t2);
3471     tcg_temp_free(t0);
3472     tcg_temp_free_i32(t1);
3473     tcg_temp_free_i32(t2);
3474 }
3475 
3476 /* lswx */
3477 static void gen_lswx(DisasContext *ctx)
3478 {
3479     TCGv t0;
3480     TCGv_i32 t1, t2, t3;
3481 
3482     if (ctx->le_mode) {
3483         gen_align_no_le(ctx);
3484         return;
3485     }
3486     gen_set_access_type(ctx, ACCESS_INT);
3487     t0 = tcg_temp_new();
3488     gen_addr_reg_index(ctx, t0);
3489     t1 = tcg_const_i32(rD(ctx->opcode));
3490     t2 = tcg_const_i32(rA(ctx->opcode));
3491     t3 = tcg_const_i32(rB(ctx->opcode));
3492     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3493     tcg_temp_free(t0);
3494     tcg_temp_free_i32(t1);
3495     tcg_temp_free_i32(t2);
3496     tcg_temp_free_i32(t3);
3497 }
3498 
3499 /* stswi */
3500 static void gen_stswi(DisasContext *ctx)
3501 {
3502     TCGv t0;
3503     TCGv_i32 t1, t2;
3504     int nb = NB(ctx->opcode);
3505 
3506     if (ctx->le_mode) {
3507         gen_align_no_le(ctx);
3508         return;
3509     }
3510     gen_set_access_type(ctx, ACCESS_INT);
3511     t0 = tcg_temp_new();
3512     gen_addr_register(ctx, t0);
3513     if (nb == 0) {
3514         nb = 32;
3515     }
3516     t1 = tcg_const_i32(nb);
3517     t2 = tcg_const_i32(rS(ctx->opcode));
3518     gen_helper_stsw(cpu_env, t0, t1, t2);
3519     tcg_temp_free(t0);
3520     tcg_temp_free_i32(t1);
3521     tcg_temp_free_i32(t2);
3522 }
3523 
3524 /* stswx */
3525 static void gen_stswx(DisasContext *ctx)
3526 {
3527     TCGv t0;
3528     TCGv_i32 t1, t2;
3529 
3530     if (ctx->le_mode) {
3531         gen_align_no_le(ctx);
3532         return;
3533     }
3534     gen_set_access_type(ctx, ACCESS_INT);
3535     t0 = tcg_temp_new();
3536     gen_addr_reg_index(ctx, t0);
3537     t1 = tcg_temp_new_i32();
3538     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3539     tcg_gen_andi_i32(t1, t1, 0x7F);
3540     t2 = tcg_const_i32(rS(ctx->opcode));
3541     gen_helper_stsw(cpu_env, t0, t1, t2);
3542     tcg_temp_free(t0);
3543     tcg_temp_free_i32(t1);
3544     tcg_temp_free_i32(t2);
3545 }
3546 
3547 /***                        Memory synchronisation                         ***/
3548 /* eieio */
3549 static void gen_eieio(DisasContext *ctx)
3550 {
3551     TCGBar bar = TCG_MO_ALL;
3552 
3553     /*
3554      * eieio has complex semantics. It provides memory ordering between
3555      * operations in the set:
3556      * - loads from CI memory.
3557      * - stores to CI memory.
3558      * - stores to WT memory.
3559      *
3560      * It separately also orders memory for operations in the set:
3561      * - stores to cacheable memory.
3562      *
3563      * It also serializes instructions:
3564      * - dcbt and dcbst.
3565      *
3566      * It separately serializes:
3567      * - tlbie and tlbsync.
3568      *
3569      * And separately serializes:
3570      * - slbieg, slbiag, and slbsync.
3571      *
3572      * The end result is that CI memory ordering requires TCG_MO_ALL
3573      * and it is not possible to special-case more relaxed ordering for
3574      * cacheable accesses. TCG_BAR_SC is required to provide this
3575      * serialization.
3576      */
3577 
3578     /*
3579      * POWER9 has an eieio instruction variant using bit 6 as a hint to
3580      * tell the CPU it is a store-forwarding barrier.
3581      */
3582     if (ctx->opcode & 0x2000000) {
3583         /*
3584          * ISA says that "Reserved fields in instructions are ignored
3585          * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
3586          * since this is not a form software should be using,
3587          * complain to the user.
3588          */
3589         if (!(ctx->insns_flags2 & PPC2_ISA300)) {
3590             qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
3591                           TARGET_FMT_lx "\n", ctx->cia);
3592         } else {
3593             bar = TCG_MO_ST_LD;
3594         }
3595     }
3596 
3597     tcg_gen_mb(bar | TCG_BAR_SC);
3598 }
3599 
3600 #if !defined(CONFIG_USER_ONLY)
3601 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3602 {
3603     TCGv_i32 t;
3604     TCGLabel *l;
3605 
3606     if (!ctx->lazy_tlb_flush) {
3607         return;
3608     }
3609     l = gen_new_label();
3610     t = tcg_temp_new_i32();
3611     tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3612     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3613     if (global) {
3614         gen_helper_check_tlb_flush_global(cpu_env);
3615     } else {
3616         gen_helper_check_tlb_flush_local(cpu_env);
3617     }
3618     gen_set_label(l);
3619     tcg_temp_free_i32(t);
3620 }
3621 #else
3622 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3623 #endif
3624 
3625 /* isync */
3626 static void gen_isync(DisasContext *ctx)
3627 {
3628     /*
3629      * We need to check for a pending TLB flush. This can only happen in
3630      * kernel mode, so check MSR_PR.
3631      */
3632     if (!ctx->pr) {
3633         gen_check_tlb_flush(ctx, false);
3634     }
3635     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
3636     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
3637 }
3638 
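/* Access size in bytes (1, 2, 4 or 8) encoded in a MemOp */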
3639 #define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
3640 
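/*
 * Load and reserve: load the value and record the reservation address and
 * the loaded value in cpu_reserve / cpu_reserve_val, so that a subsequent
 * store conditional can check them.
 */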
3641 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3642 {
3643     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3644     TCGv t0 = tcg_temp_new();
3645 
3646     gen_set_access_type(ctx, ACCESS_RES);
3647     gen_addr_reg_index(ctx, t0);
3648     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3649     tcg_gen_mov_tl(cpu_reserve, t0);
3650     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3651     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3652     tcg_temp_free(t0);
3653 }
3654 
3655 #define LARX(name, memop)                  \
3656 static void gen_##name(DisasContext *ctx)  \
3657 {                                          \
3658     gen_load_locked(ctx, memop);           \
3659 }
3660 
3661 /* lbarx, lharx, lwarx */
3662 LARX(lbarx, DEF_MEMOP(MO_UB))
3663 LARX(lharx, DEF_MEMOP(MO_UW))
3664 LARX(lwarx, DEF_MEMOP(MO_UL))
3665 
3666 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3667                                       TCGv EA, TCGCond cond, int addend)
3668 {
3669     TCGv t = tcg_temp_new();
3670     TCGv t2 = tcg_temp_new();
3671     TCGv u = tcg_temp_new();
3672 
3673     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3674     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3675     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3676     tcg_gen_addi_tl(u, t, addend);
3677 
3678     /* E.g. for fetch and increment bounded... */
3679     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3680     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3681     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3682 
3683     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3684     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3685     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3686 
3687     tcg_temp_free(t);
3688     tcg_temp_free(t2);
3689     tcg_temp_free(u);
3690 }
3691 
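/*
 * Atomic load-and-op (lwat/ldat): the FC field selects the operation
 * (fetch-and-add/xor/or/and, min/max, swap, compare-and-swap-not-equal,
 * fetch-and-increment/decrement variants). RT receives the old memory
 * value; some forms also use RT+1 and RT+2 as additional operands.
 */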
3692 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
3693 {
3694     uint32_t gpr_FC = FC(ctx->opcode);
3695     TCGv EA = tcg_temp_new();
3696     int rt = rD(ctx->opcode);
3697     bool need_serial;
3698     TCGv src, dst;
3699 
3700     gen_addr_register(ctx, EA);
3701     dst = cpu_gpr[rt];
3702     src = cpu_gpr[(rt + 1) & 31];
3703 
3704     need_serial = false;
3705     memop |= MO_ALIGN;
3706     switch (gpr_FC) {
3707     case 0: /* Fetch and add */
3708         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3709         break;
3710     case 1: /* Fetch and xor */
3711         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3712         break;
3713     case 2: /* Fetch and or */
3714         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3715         break;
3716     case 3: /* Fetch and 'and' */
3717         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3718         break;
3719     case 4:  /* Fetch and max unsigned */
3720         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3721         break;
3722     case 5:  /* Fetch and max signed */
3723         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3724         break;
3725     case 6:  /* Fetch and min unsigned */
3726         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3727         break;
3728     case 7:  /* Fetch and min signed */
3729         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3730         break;
3731     case 8: /* Swap */
3732         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3733         break;
3734 
3735     case 16: /* Compare and swap not equal */
3736         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3737             need_serial = true;
3738         } else {
3739             TCGv t0 = tcg_temp_new();
3740             TCGv t1 = tcg_temp_new();
3741 
3742             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3743             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3744                 tcg_gen_mov_tl(t1, src);
3745             } else {
3746                 tcg_gen_ext32u_tl(t1, src);
3747             }
3748             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3749                                cpu_gpr[(rt + 2) & 31], t0);
3750             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3751             tcg_gen_mov_tl(dst, t0);
3752 
3753             tcg_temp_free(t0);
3754             tcg_temp_free(t1);
3755         }
3756         break;
3757 
3758     case 24: /* Fetch and increment bounded */
3759         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3760             need_serial = true;
3761         } else {
3762             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3763         }
3764         break;
3765     case 25: /* Fetch and increment equal */
3766         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3767             need_serial = true;
3768         } else {
3769             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3770         }
3771         break;
3772     case 28: /* Fetch and decrement bounded */
3773         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3774             need_serial = true;
3775         } else {
3776             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3777         }
3778         break;
3779 
3780     default:
3781         /* invoke data storage error handler */
3782         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3783     }
3784     tcg_temp_free(EA);
3785 
3786     if (need_serial) {
3787         /* Restart with exclusive lock.  */
3788         gen_helper_exit_atomic(cpu_env);
3789         ctx->base.is_jmp = DISAS_NORETURN;
3790     }
3791 }
3792 
3793 static void gen_lwat(DisasContext *ctx)
3794 {
3795     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3796 }
3797 
3798 #ifdef TARGET_PPC64
3799 static void gen_ldat(DisasContext *ctx)
3800 {
3801     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3802 }
3803 #endif
3804 
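/*
 * Atomic store-and-op (stwat/stdat): the FC field selects the operation
 * applied to memory with RS as the source operand; the fetched result is
 * discarded.
 */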
3805 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3806 {
3807     uint32_t gpr_FC = FC(ctx->opcode);
3808     TCGv EA = tcg_temp_new();
3809     TCGv src, discard;
3810 
3811     gen_addr_register(ctx, EA);
3812     src = cpu_gpr[rD(ctx->opcode)];
3813     discard = tcg_temp_new();
3814 
3815     memop |= MO_ALIGN;
3816     switch (gpr_FC) {
3817     case 0: /* add and Store */
3818         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3819         break;
3820     case 1: /* xor and Store */
3821         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3822         break;
3823     case 2: /* Or and Store */
3824         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3825         break;
3826     case 3: /* 'and' and Store */
3827         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3828         break;
3829     case 4:  /* Store max unsigned */
3830         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3831         break;
3832     case 5:  /* Store max signed */
3833         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3834         break;
3835     case 6:  /* Store min unsigned */
3836         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3837         break;
3838     case 7:  /* Store min signed */
3839         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3840         break;
3841     case 24: /* Store twin */
3842         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3843             /* Restart with exclusive lock.  */
3844             gen_helper_exit_atomic(cpu_env);
3845             ctx->base.is_jmp = DISAS_NORETURN;
3846         } else {
3847             TCGv t = tcg_temp_new();
3848             TCGv t2 = tcg_temp_new();
3849             TCGv s = tcg_temp_new();
3850             TCGv s2 = tcg_temp_new();
3851             TCGv ea_plus_s = tcg_temp_new();
3852 
3853             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3854             tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
3855             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3856             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3857             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3858             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3859             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3860 
3861             tcg_temp_free(ea_plus_s);
3862             tcg_temp_free(s2);
3863             tcg_temp_free(s);
3864             tcg_temp_free(t2);
3865             tcg_temp_free(t);
3866         }
3867         break;
3868     default:
3869         /* invoke data storage error handler */
3870         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3871     }
3872     tcg_temp_free(discard);
3873     tcg_temp_free(EA);
3874 }
3875 
3876 static void gen_stwat(DisasContext *ctx)
3877 {
3878     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3879 }
3880 
3881 #ifdef TARGET_PPC64
3882 static void gen_stdat(DisasContext *ctx)
3883 {
3884     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3885 }
3886 #endif
3887 
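/*
 * Store conditional: succeed only if the effective address matches the
 * reservation and memory still holds the reserved value (checked with an
 * atomic cmpxchg). CR0[EQ] reports success, CR0[SO] is copied from
 * XER[SO], and the reservation is cleared in either case.
 */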
3888 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3889 {
3890     TCGLabel *l1 = gen_new_label();
3891     TCGLabel *l2 = gen_new_label();
3892     TCGv t0 = tcg_temp_new();
3893     int reg = rS(ctx->opcode);
3894 
3895     gen_set_access_type(ctx, ACCESS_RES);
3896     gen_addr_reg_index(ctx, t0);
3897     tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3898     tcg_temp_free(t0);
3899 
3900     t0 = tcg_temp_new();
3901     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3902                               cpu_gpr[reg], ctx->mem_idx,
3903                               DEF_MEMOP(memop) | MO_ALIGN);
3904     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3905     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3906     tcg_gen_or_tl(t0, t0, cpu_so);
3907     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3908     tcg_temp_free(t0);
3909     tcg_gen_br(l2);
3910 
3911     gen_set_label(l1);
3912 
3913     /*
3914      * Address mismatch implies failure.  But we still need to provide
3915      * the memory barrier semantics of the instruction.
3916      */
3917     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3918     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3919 
3920     gen_set_label(l2);
3921     tcg_gen_movi_tl(cpu_reserve, -1);
3922 }
3923 
3924 #define STCX(name, memop)                  \
3925 static void gen_##name(DisasContext *ctx)  \
3926 {                                          \
3927     gen_conditional_store(ctx, memop);     \
3928 }
3929 
3930 STCX(stbcx_, DEF_MEMOP(MO_UB))
3931 STCX(sthcx_, DEF_MEMOP(MO_UW))
3932 STCX(stwcx_, DEF_MEMOP(MO_UL))
3933 
3934 #if defined(TARGET_PPC64)
3935 /* ldarx */
3936 LARX(ldarx, DEF_MEMOP(MO_UQ))
3937 /* stdcx. */
3938 STCX(stdcx_, DEF_MEMOP(MO_UQ))
3939 
3940 /* lqarx */
3941 static void gen_lqarx(DisasContext *ctx)
3942 {
3943     int rd = rD(ctx->opcode);
3944     TCGv EA, hi, lo;
3945 
3946     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3947                  (rd == rB(ctx->opcode)))) {
3948         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3949         return;
3950     }
3951 
3952     gen_set_access_type(ctx, ACCESS_RES);
3953     EA = tcg_temp_new();
3954     gen_addr_reg_index(ctx, EA);
3955 
3956     /* Note that the low part is always in RD+1, even in LE mode.  */
3957     lo = cpu_gpr[rd + 1];
3958     hi = cpu_gpr[rd];
3959 
3960     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3961         if (HAVE_ATOMIC128) {
3962             TCGv_i32 oi = tcg_temp_new_i32();
3963             if (ctx->le_mode) {
3964                 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
3965                                                     ctx->mem_idx));
3966                 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3967             } else {
3968                 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
3969                                                     ctx->mem_idx));
3970                 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3971             }
3972             tcg_temp_free_i32(oi);
3973             tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3974         } else {
3975             /* Restart with exclusive lock.  */
3976             gen_helper_exit_atomic(cpu_env);
3977             ctx->base.is_jmp = DISAS_NORETURN;
3978             tcg_temp_free(EA);
3979             return;
3980         }
3981     } else if (ctx->le_mode) {
3982         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
3983         tcg_gen_mov_tl(cpu_reserve, EA);
3984         gen_addr_add(ctx, EA, EA, 8);
3985         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
3986     } else {
3987         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
3988         tcg_gen_mov_tl(cpu_reserve, EA);
3989         gen_addr_add(ctx, EA, EA, 8);
3990         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
3991     }
3992     tcg_temp_free(EA);
3993 
3994     tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
3995     tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
3996 }
3997 
3998 /* stqcx. */
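/*
 * Quadword store conditional. In the non-parallel case, the store succeeds
 * only if the effective address matches the reservation and the current
 * memory contents still match the two doublewords saved by lqarx; the
 * doubleword order in memory depends on the current endianness.
 */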
3999 static void gen_stqcx_(DisasContext *ctx)
4000 {
4001     int rs = rS(ctx->opcode);
4002     TCGv EA, hi, lo;
4003 
4004     if (unlikely(rs & 1)) {
4005         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4006         return;
4007     }
4008 
4009     gen_set_access_type(ctx, ACCESS_RES);
4010     EA = tcg_temp_new();
4011     gen_addr_reg_index(ctx, EA);
4012 
4013     /* Note that the low part is always in RS+1, even in LE mode.  */
4014     lo = cpu_gpr[rs + 1];
4015     hi = cpu_gpr[rs];
4016 
4017     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
4018         if (HAVE_CMPXCHG128) {
4019             TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
4020             if (ctx->le_mode) {
4021                 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
4022                                              EA, lo, hi, oi);
4023             } else {
4024                 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
4025                                              EA, lo, hi, oi);
4026             }
4027             tcg_temp_free_i32(oi);
4028         } else {
4029             /* Restart with exclusive lock.  */
4030             gen_helper_exit_atomic(cpu_env);
4031             ctx->base.is_jmp = DISAS_NORETURN;
4032         }
4033         tcg_temp_free(EA);
4034     } else {
4035         TCGLabel *lab_fail = gen_new_label();
4036         TCGLabel *lab_over = gen_new_label();
4037         TCGv_i64 t0 = tcg_temp_new_i64();
4038         TCGv_i64 t1 = tcg_temp_new_i64();
4039 
4040         tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
4041         tcg_temp_free(EA);
4042 
4043         gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
4044         tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4045                                      ? offsetof(CPUPPCState, reserve_val2)
4046                                      : offsetof(CPUPPCState, reserve_val)));
4047         tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4048 
4049         tcg_gen_addi_i64(t0, cpu_reserve, 8);
4050         gen_qemu_ld64_i64(ctx, t0, t0);
4051         tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
4052                                      ? offsetof(CPUPPCState, reserve_val)
4053                                      : offsetof(CPUPPCState, reserve_val2)));
4054         tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
4055 
4056         /* Success */
4057         gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
4058         tcg_gen_addi_i64(t0, cpu_reserve, 8);
4059         gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);
4060 
4061         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4062         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
4063         tcg_gen_br(lab_over);
4064 
4065         gen_set_label(lab_fail);
4066         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4067 
4068         gen_set_label(lab_over);
4069         tcg_gen_movi_tl(cpu_reserve, -1);
4070         tcg_temp_free_i64(t0);
4071         tcg_temp_free_i64(t1);
4072     }
4073 }
4074 #endif /* defined(TARGET_PPC64) */
4075 
4076 /* sync */
4077 static void gen_sync(DisasContext *ctx)
4078 {
4079     TCGBar bar = TCG_MO_ALL;
4080     uint32_t l = (ctx->opcode >> 21) & 3;
4081 
4082     if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
4083         bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
4084     }
4085 
4086     /*
4087      * We may need to check for a pending TLB flush.
4088      *
4089      * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
4090      *
4091      * Additionally, this can only happen in kernel mode, so
4092      * check MSR_PR as well.
4093      */
4094     if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4095         gen_check_tlb_flush(ctx, true);
4096     }
4097 
4098     tcg_gen_mb(bar | TCG_BAR_SC);
4099 }
4100 
4101 /* wait */
4102 static void gen_wait(DisasContext *ctx)
4103 {
4104     uint32_t wc;
4105 
4106     if (ctx->insns_flags & PPC_WAIT) {
4107         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
4108 
4109         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
4110             /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
4111             wc = WC(ctx->opcode);
4112         } else {
4113             wc = 0;
4114         }
4115 
4116     } else if (ctx->insns_flags2 & PPC2_ISA300) {
4117         /* v3.0 defines a new 'wait' encoding. */
4118         wc = WC(ctx->opcode);
4119         if (ctx->insns_flags2 & PPC2_ISA310) {
4120             uint32_t pl = PL(ctx->opcode);
4121 
4122             /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
4123             if (wc == 3) {
4124                 gen_invalid(ctx);
4125                 return;
4126             }
4127 
4128             /* PL 1-3 are reserved. If WC=2 the insn is treated as a no-op. */
4129             if (pl > 0 && wc != 2) {
4130                 gen_invalid(ctx);
4131                 return;
4132             }
4133 
4134         } else { /* ISA300 */
4135             /* WC 1-3 are reserved */
4136             if (wc > 0) {
4137                 gen_invalid(ctx);
4138                 return;
4139             }
4140         }
4141 
4142     } else {
4143         warn_report("wait instruction decoded with wrong ISA flags.");
4144         gen_invalid(ctx);
4145         return;
4146     }
4147 
4148     /*
4149      * wait without WC field or with WC=0 waits for an exception / interrupt
4150      * to occur.
4151      */
4152     if (wc == 0) {
4153         TCGv_i32 t0 = tcg_const_i32(1);
4154         tcg_gen_st_i32(t0, cpu_env,
4155                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4156         tcg_temp_free_i32(t0);
4157         /* Stop translation, as the CPU is supposed to sleep from now */
4158         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4159     }
4160 
4161     /*
4162      * Other wait types must not just wait until an exception occurs because
4163      * ignoring their other wake-up conditions could cause a hang.
4164      *
4165      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
4166      * no-ops.
4167      *
4168      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
4169      *
4170      * wc=2 waits for an implementation-specific condition, which could
4171      * always be true, so it can be implemented as a no-op.
4172      *
4173      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
4174      *
4175      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
4176      * Reservation-loss may have implementation-specific conditions, so it
4177      * can be implemented as a no-op.
4178      *
4179      * wc=2 waits for an exception or an amount of time to pass. This
4180      * amount is implementation-specific so it can be implemented as a
4181      * no-op.
4182      *
4183      * ISA v3.1 allows for execution to resume "in the rare case of
4184      * an implementation-dependent event", so in any case software must
4185      * not depend on the architected resumption condition becoming
4186      * true; no-op implementations should therefore be architecturally
4187      * correct (if suboptimal).
4188      */
4189 }
4190 
4191 #if defined(TARGET_PPC64)
4192 static void gen_doze(DisasContext *ctx)
4193 {
4194 #if defined(CONFIG_USER_ONLY)
4195     GEN_PRIV(ctx);
4196 #else
4197     TCGv_i32 t;
4198 
4199     CHK_HV(ctx);
4200     t = tcg_const_i32(PPC_PM_DOZE);
4201     gen_helper_pminsn(cpu_env, t);
4202     tcg_temp_free_i32(t);
4203     /* Stop translation, as the CPU is supposed to sleep from now */
4204     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4205 #endif /* defined(CONFIG_USER_ONLY) */
4206 }
4207 
4208 static void gen_nap(DisasContext *ctx)
4209 {
4210 #if defined(CONFIG_USER_ONLY)
4211     GEN_PRIV(ctx);
4212 #else
4213     TCGv_i32 t;
4214 
4215     CHK_HV(ctx);
4216     t = tcg_const_i32(PPC_PM_NAP);
4217     gen_helper_pminsn(cpu_env, t);
4218     tcg_temp_free_i32(t);
4219     /* Stop translation, as the CPU is supposed to sleep from now */
4220     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4221 #endif /* defined(CONFIG_USER_ONLY) */
4222 }
4223 
4224 static void gen_stop(DisasContext *ctx)
4225 {
4226 #if defined(CONFIG_USER_ONLY)
4227     GEN_PRIV(ctx);
4228 #else
4229     TCGv_i32 t;
4230 
4231     CHK_HV(ctx);
4232     t = tcg_const_i32(PPC_PM_STOP);
4233     gen_helper_pminsn(cpu_env, t);
4234     tcg_temp_free_i32(t);
4235     /* Stop translation, as the CPU is supposed to sleep from now */
4236     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4237 #endif /* defined(CONFIG_USER_ONLY) */
4238 }
4239 
4240 static void gen_sleep(DisasContext *ctx)
4241 {
4242 #if defined(CONFIG_USER_ONLY)
4243     GEN_PRIV(ctx);
4244 #else
4245     TCGv_i32 t;
4246 
4247     CHK_HV(ctx);
4248     t = tcg_const_i32(PPC_PM_SLEEP);
4249     gen_helper_pminsn(cpu_env, t);
4250     tcg_temp_free_i32(t);
4251     /* Stop translation, as the CPU is supposed to sleep from now */
4252     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4253 #endif /* defined(CONFIG_USER_ONLY) */
4254 }
4255 
4256 static void gen_rvwinkle(DisasContext *ctx)
4257 {
4258 #if defined(CONFIG_USER_ONLY)
4259     GEN_PRIV(ctx);
4260 #else
4261     TCGv_i32 t;
4262 
4263     CHK_HV(ctx);
4264     t = tcg_const_i32(PPC_PM_RVWINKLE);
4265     gen_helper_pminsn(cpu_env, t);
4266     tcg_temp_free_i32(t);
4267     /* Stop translation, as the CPU is supposed to sleep from now */
4268     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4269 #endif /* defined(CONFIG_USER_ONLY) */
4270 }
4271 #endif /* #if defined(TARGET_PPC64) */
4272 
4273 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4274 {
4275 #if defined(TARGET_PPC64)
4276     if (ctx->has_cfar) {
4277         tcg_gen_movi_tl(cpu_cfar, nip);
4278     }
4279 #endif
4280 }
4281 
4282 #if defined(TARGET_PPC64)
4283 static void pmu_count_insns(DisasContext *ctx)
4284 {
4285     /*
4286      * Do not bother calling the helper if the PMU isn't counting
4287      * instructions.
4288      */
4289     if (!ctx->pmu_insn_cnt) {
4290         return;
4291     }
4292 
4293  #if !defined(CONFIG_USER_ONLY)
4294     TCGLabel *l;
4295     TCGv t0;
4296 
4297     /*
4298      * The PMU insns_inc() helper stops the internal PMU timer if a
4299      * counter overflow happens. In that case, if the guest is
4300      * running with icount and we do not handle it beforehand,
4301      * the helper can trigger a 'bad icount read'.
4302      */
4303     gen_icount_io_start(ctx);
4304 
4305     /* Avoid helper calls when only PMC5-6 are enabled. */
4306     if (!ctx->pmc_other) {
4307         l = gen_new_label();
4308         t0 = tcg_temp_new();
4309 
4310         gen_load_spr(t0, SPR_POWER_PMC5);
4311         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4312         gen_store_spr(SPR_POWER_PMC5, t0);
4313         /* Check for overflow, if it's enabled */
4314         if (ctx->mmcr0_pmcjce) {
4315             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
4316             gen_helper_handle_pmc5_overflow(cpu_env);
4317         }
4318 
4319         gen_set_label(l);
4320         tcg_temp_free(t0);
4321     } else {
4322         gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
4323     }
4324   #else
4325     /*
4326      * User mode can read (but not write) PMC5 and start/stop
4327      * the PMU via MMCR0_FC. In this case just increment
4328      * PMC5 with base.num_insns.
4329      */
4330     TCGv t0 = tcg_temp_new();
4331 
4332     gen_load_spr(t0, SPR_POWER_PMC5);
4333     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4334     gen_store_spr(SPR_POWER_PMC5, t0);
4335 
4336     tcg_temp_free(t0);
4337   #endif /* #if !defined(CONFIG_USER_ONLY) */
4338 }
4339 #else
4340 static void pmu_count_insns(DisasContext *ctx)
4341 {
4342     return;
4343 }
4344 #endif /* #if defined(TARGET_PPC64) */
4345 
4346 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4347 {
4348     return translator_use_goto_tb(&ctx->base, dest);
4349 }
4350 
4351 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4352 {
4353     if (unlikely(ctx->singlestep_enabled)) {
4354         gen_debug_exception(ctx);
4355     } else {
4356         /*
4357          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4358          * CF_NO_GOTO_PTR is set. Count insns now.
4359          */
4360         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4361             pmu_count_insns(ctx);
4362         }
4363 
4364         tcg_gen_lookup_and_goto_ptr();
4365     }
4366 }
4367 
4368 /***                                Branch                                 ***/
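/*
 * Branch to 'dest': update NIP and either chain directly to the target TB
 * (goto_tb) when allowed, or fall back to an indirect TB lookup.
 */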
4369 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4370 {
4371     if (NARROW_MODE(ctx)) {
4372         dest = (uint32_t) dest;
4373     }
4374     if (use_goto_tb(ctx, dest)) {
4375         pmu_count_insns(ctx);
4376         tcg_gen_goto_tb(n);
4377         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4378         tcg_gen_exit_tb(ctx->base.tb, n);
4379     } else {
4380         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4381         gen_lookup_and_goto_ptr(ctx);
4382     }
4383 }
4384 
4385 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4386 {
4387     if (NARROW_MODE(ctx)) {
4388         nip = (uint32_t)nip;
4389     }
4390     tcg_gen_movi_tl(cpu_lr, nip);
4391 }
4392 
4393 /* b ba bl bla */
4394 static void gen_b(DisasContext *ctx)
4395 {
4396     target_ulong li, target;
4397 
4398     /* sign extend LI */
4399     li = LI(ctx->opcode);
4400     li = (li ^ 0x02000000) - 0x02000000;
4401     if (likely(AA(ctx->opcode) == 0)) {
4402         target = ctx->cia + li;
4403     } else {
4404         target = li;
4405     }
4406     if (LK(ctx->opcode)) {
4407         gen_setlr(ctx, ctx->base.pc_next);
4408     }
4409     gen_update_cfar(ctx, ctx->cia);
4410     gen_goto_tb(ctx, 0, target);
4411     ctx->base.is_jmp = DISAS_NORETURN;
4412 }
4413 
4414 #define BCOND_IM  0
4415 #define BCOND_LR  1
4416 #define BCOND_CTR 2
4417 #define BCOND_TAR 3
4418 
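/*
 * Conditional branches. The BO field selects the condition:
 *   (BO & 0x04) == 0 -> decrement CTR, then require CTR == 0 (BO & 0x02
 *                       set) or CTR != 0 (BO & 0x02 clear)
 *   (BO & 0x10) == 0 -> also require CR bit BI set (BO & 0x08 set) or
 *                       clear (BO & 0x08 clear)
 * 'type' selects the branch target: immediate displacement, LR, CTR or TAR.
 */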
4419 static void gen_bcond(DisasContext *ctx, int type)
4420 {
4421     uint32_t bo = BO(ctx->opcode);
4422     TCGLabel *l1;
4423     TCGv target;
4424 
4425     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4426         target = tcg_temp_local_new();
4427         if (type == BCOND_CTR) {
4428             tcg_gen_mov_tl(target, cpu_ctr);
4429         } else if (type == BCOND_TAR) {
4430             gen_load_spr(target, SPR_TAR);
4431         } else {
4432             tcg_gen_mov_tl(target, cpu_lr);
4433         }
4434     } else {
4435         target = NULL;
4436     }
4437     if (LK(ctx->opcode)) {
4438         gen_setlr(ctx, ctx->base.pc_next);
4439     }
4440     l1 = gen_new_label();
4441     if ((bo & 0x4) == 0) {
4442         /* Decrement and test CTR */
4443         TCGv temp = tcg_temp_new();
4444 
4445         if (type == BCOND_CTR) {
4446             /*
4447              * All ISAs up to v3 describe this form of bcctr as invalid but
4448              * some processors, i.e. 64-bit server processors compliant with
4449              * arch 2.x, do implement a "test and decrement" logic instead,
4450              * as described in their respective UMs. This logic makes CTR
4451              * act as both the branch target and a counter, which makes the
4452              * form basically useless, and it is thus never used in real code.
4453              *
4454              * This form was hence chosen to trigger the extra
4455              * micro-architectural side-effect on real HW needed for the
4456              * Spectre v2 workaround. It is up to guests that implement such
4457              * a workaround, i.e. Linux, to use this form in a way that just
4458              * triggers the side-effect without doing anything else harmful.
4459              */
4460             if (unlikely(!is_book3s_arch2x(ctx))) {
4461                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4462                 tcg_temp_free(temp);
4463                 tcg_temp_free(target);
4464                 return;
4465             }
4466 
4467             if (NARROW_MODE(ctx)) {
4468                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4469             } else {
4470                 tcg_gen_mov_tl(temp, cpu_ctr);
4471             }
4472             if (bo & 0x2) {
4473                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4474             } else {
4475                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4476             }
4477             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4478         } else {
4479             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4480             if (NARROW_MODE(ctx)) {
4481                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4482             } else {
4483                 tcg_gen_mov_tl(temp, cpu_ctr);
4484             }
4485             if (bo & 0x2) {
4486                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4487             } else {
4488                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4489             }
4490         }
4491         tcg_temp_free(temp);
4492     }
4493     if ((bo & 0x10) == 0) {
4494         /* Test CR */
4495         uint32_t bi = BI(ctx->opcode);
4496         uint32_t mask = 0x08 >> (bi & 0x03);
4497         TCGv_i32 temp = tcg_temp_new_i32();
4498 
4499         if (bo & 0x8) {
4500             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4501             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4502         } else {
4503             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4504             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4505         }
4506         tcg_temp_free_i32(temp);
4507     }
4508     gen_update_cfar(ctx, ctx->cia);
4509     if (type == BCOND_IM) {
4510         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4511         if (likely(AA(ctx->opcode) == 0)) {
4512             gen_goto_tb(ctx, 0, ctx->cia + li);
4513         } else {
4514             gen_goto_tb(ctx, 0, li);
4515         }
4516     } else {
4517         if (NARROW_MODE(ctx)) {
4518             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4519         } else {
4520             tcg_gen_andi_tl(cpu_nip, target, ~3);
4521         }
4522         gen_lookup_and_goto_ptr(ctx);
4523         tcg_temp_free(target);
4524     }
4525     if ((bo & 0x14) != 0x14) {
4526         /* fallthrough case */
4527         gen_set_label(l1);
4528         gen_goto_tb(ctx, 1, ctx->base.pc_next);
4529     }
4530     ctx->base.is_jmp = DISAS_NORETURN;
4531 }
4532 
4533 static void gen_bc(DisasContext *ctx)
4534 {
4535     gen_bcond(ctx, BCOND_IM);
4536 }
4537 
4538 static void gen_bcctr(DisasContext *ctx)
4539 {
4540     gen_bcond(ctx, BCOND_CTR);
4541 }
4542 
4543 static void gen_bclr(DisasContext *ctx)
4544 {
4545     gen_bcond(ctx, BCOND_LR);
4546 }
4547 
4548 static void gen_bctar(DisasContext *ctx)
4549 {
4550     gen_bcond(ctx, BCOND_TAR);
4551 }
4552 
4553 /***                      Condition register logical                       ***/
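/*
 * CR-bit logical operations: crbD = crbA <op> crbB. CR bits are stored
 * four per cpu_crf[] element, so the source bits are first shifted into
 * the destination bit position, combined, and then merged into the
 * destination CR field under a single-bit mask.
 */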
4554 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
4555 static void glue(gen_, name)(DisasContext *ctx)                               \
4556 {                                                                             \
4557     uint8_t bitmask;                                                          \
4558     int sh;                                                                   \
4559     TCGv_i32 t0, t1;                                                          \
4560     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
4561     t0 = tcg_temp_new_i32();                                                  \
4562     if (sh > 0)                                                               \
4563         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
4564     else if (sh < 0)                                                          \
4565         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
4566     else                                                                      \
4567         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
4568     t1 = tcg_temp_new_i32();                                                  \
4569     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
4570     if (sh > 0)                                                               \
4571         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
4572     else if (sh < 0)                                                          \
4573         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
4574     else                                                                      \
4575         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
4576     tcg_op(t0, t0, t1);                                                       \
4577     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
4578     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
4579     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
4580     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
4581     tcg_temp_free_i32(t0);                                                    \
4582     tcg_temp_free_i32(t1);                                                    \
4583 }
4584 
4585 /* crand */
4586 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4587 /* crandc */
4588 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4589 /* creqv */
4590 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4591 /* crnand */
4592 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4593 /* crnor */
4594 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4595 /* cror */
4596 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4597 /* crorc */
4598 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4599 /* crxor */
4600 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4601 
4602 /* mcrf */
4603 static void gen_mcrf(DisasContext *ctx)
4604 {
4605     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4606 }
4607 
4608 /***                           System linkage                              ***/
4609 
4610 /* rfi (supervisor only) */
4611 static void gen_rfi(DisasContext *ctx)
4612 {
4613 #if defined(CONFIG_USER_ONLY)
4614     GEN_PRIV(ctx);
4615 #else
4616     /*
4617      * This instruction doesn't exist anymore on 64-bit server
4618      * processors compliant with arch 2.x
4619      */
4620     if (is_book3s_arch2x(ctx)) {
4621         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4622         return;
4623     }
4624     /* Restore CPU state */
4625     CHK_SV(ctx);
4626     gen_icount_io_start(ctx);
4627     gen_update_cfar(ctx, ctx->cia);
4628     gen_helper_rfi(cpu_env);
4629     ctx->base.is_jmp = DISAS_EXIT;
4630 #endif
4631 }
4632 
4633 #if defined(TARGET_PPC64)
4634 static void gen_rfid(DisasContext *ctx)
4635 {
4636 #if defined(CONFIG_USER_ONLY)
4637     GEN_PRIV(ctx);
4638 #else
4639     /* Restore CPU state */
4640     CHK_SV(ctx);
4641     gen_icount_io_start(ctx);
4642     gen_update_cfar(ctx, ctx->cia);
4643     gen_helper_rfid(cpu_env);
4644     ctx->base.is_jmp = DISAS_EXIT;
4645 #endif
4646 }
4647 
4648 #if !defined(CONFIG_USER_ONLY)
4649 static void gen_rfscv(DisasContext *ctx)
4650 {
4651 #if defined(CONFIG_USER_ONLY)
4652     GEN_PRIV(ctx);
4653 #else
4654     /* Restore CPU state */
4655     CHK_SV(ctx);
4656     gen_icount_io_start(ctx);
4657     gen_update_cfar(ctx, ctx->cia);
4658     gen_helper_rfscv(cpu_env);
4659     ctx->base.is_jmp = DISAS_EXIT;
4660 #endif
4661 }
4662 #endif
4663 
4664 static void gen_hrfid(DisasContext *ctx)
4665 {
4666 #if defined(CONFIG_USER_ONLY)
4667     GEN_PRIV(ctx);
4668 #else
4669     /* Restore CPU state */
4670     CHK_HV(ctx);
4671     gen_helper_hrfid(cpu_env);
4672     ctx->base.is_jmp = DISAS_EXIT;
4673 #endif
4674 }
4675 #endif
4676 
4677 /* sc */
4678 #if defined(CONFIG_USER_ONLY)
4679 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4680 #else
4681 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4682 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4683 #endif
4684 static void gen_sc(DisasContext *ctx)
4685 {
4686     uint32_t lev;
4687 
4688     lev = (ctx->opcode >> 5) & 0x7F;
4689     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4690 }
4691 
4692 #if defined(TARGET_PPC64)
4693 #if !defined(CONFIG_USER_ONLY)
4694 static void gen_scv(DisasContext *ctx)
4695 {
4696     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4697 
4698     /* Set the PC back to the faulting instruction. */
4699     gen_update_nip(ctx, ctx->cia);
4700     gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4701 
4702     ctx->base.is_jmp = DISAS_NORETURN;
4703 }
4704 #endif
4705 #endif
4706 
4707 /***                                Trap                                   ***/
4708 
4709 /* Check for unconditional traps (always or never) */
4710 static bool check_unconditional_trap(DisasContext *ctx)
4711 {
4712     /* Trap never */
4713     if (TO(ctx->opcode) == 0) {
4714         return true;
4715     }
4716     /* Trap always */
4717     if (TO(ctx->opcode) == 31) {
4718         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4719         return true;
4720     }
4721     return false;
4722 }
4723 
4724 /* tw */
4725 static void gen_tw(DisasContext *ctx)
4726 {
4727     TCGv_i32 t0;
4728 
4729     if (check_unconditional_trap(ctx)) {
4730         return;
4731     }
4732     t0 = tcg_const_i32(TO(ctx->opcode));
4733     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4734                   t0);
4735     tcg_temp_free_i32(t0);
4736 }
4737 
4738 /* twi */
4739 static void gen_twi(DisasContext *ctx)
4740 {
4741     TCGv t0;
4742     TCGv_i32 t1;
4743 
4744     if (check_unconditional_trap(ctx)) {
4745         return;
4746     }
4747     t0 = tcg_const_tl(SIMM(ctx->opcode));
4748     t1 = tcg_const_i32(TO(ctx->opcode));
4749     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4750     tcg_temp_free(t0);
4751     tcg_temp_free_i32(t1);
4752 }
4753 
4754 #if defined(TARGET_PPC64)
4755 /* td */
4756 static void gen_td(DisasContext *ctx)
4757 {
4758     TCGv_i32 t0;
4759 
4760     if (check_unconditional_trap(ctx)) {
4761         return;
4762     }
4763     t0 = tcg_const_i32(TO(ctx->opcode));
4764     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4765                   t0);
4766     tcg_temp_free_i32(t0);
4767 }
4768 
4769 /* tdi */
4770 static void gen_tdi(DisasContext *ctx)
4771 {
4772     TCGv t0;
4773     TCGv_i32 t1;
4774 
4775     if (check_unconditional_trap(ctx)) {
4776         return;
4777     }
4778     t0 = tcg_const_tl(SIMM(ctx->opcode));
4779     t1 = tcg_const_i32(TO(ctx->opcode));
4780     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4781     tcg_temp_free(t0);
4782     tcg_temp_free_i32(t1);
4783 }
4784 #endif
4785 
4786 /***                          Processor control                            ***/
4787 
4788 /* mcrxr */
4789 static void gen_mcrxr(DisasContext *ctx)
4790 {
4791     TCGv_i32 t0 = tcg_temp_new_i32();
4792     TCGv_i32 t1 = tcg_temp_new_i32();
4793     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4794 
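         /* Assemble CR[crfD] = SO : OV : CA : 0, then clear XER SO, OV and CA. */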
4795     tcg_gen_trunc_tl_i32(t0, cpu_so);
4796     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4797     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4798     tcg_gen_shli_i32(t0, t0, 3);
4799     tcg_gen_shli_i32(t1, t1, 2);
4800     tcg_gen_shli_i32(dst, dst, 1);
4801     tcg_gen_or_i32(dst, dst, t0);
4802     tcg_gen_or_i32(dst, dst, t1);
4803     tcg_temp_free_i32(t0);
4804     tcg_temp_free_i32(t1);
4805 
4806     tcg_gen_movi_tl(cpu_so, 0);
4807     tcg_gen_movi_tl(cpu_ov, 0);
4808     tcg_gen_movi_tl(cpu_ca, 0);
4809 }
4810 
4811 #ifdef TARGET_PPC64
4812 /* mcrxrx */
4813 static void gen_mcrxrx(DisasContext *ctx)
4814 {
4815     TCGv t0 = tcg_temp_new();
4816     TCGv t1 = tcg_temp_new();
4817     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4818 
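         /* CR[crfD] = OV : OV32 : CA : CA32 */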
4819     /* copy OV and OV32 */
4820     tcg_gen_shli_tl(t0, cpu_ov, 1);
4821     tcg_gen_or_tl(t0, t0, cpu_ov32);
4822     tcg_gen_shli_tl(t0, t0, 2);
4823     /* copy CA and CA32 */
4824     tcg_gen_shli_tl(t1, cpu_ca, 1);
4825     tcg_gen_or_tl(t1, t1, cpu_ca32);
4826     tcg_gen_or_tl(t0, t0, t1);
4827     tcg_gen_trunc_tl_i32(dst, t0);
4828     tcg_temp_free(t0);
4829     tcg_temp_free(t1);
4830 }
4831 #endif
4832 
4833 /* mfcr mfocrf */
4834 static void gen_mfcr(DisasContext *ctx)
4835 {
4836     uint32_t crm, crn;
4837 
4838     if (likely(ctx->opcode & 0x00100000)) {
4839         crm = CRM(ctx->opcode);
4840         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4841             crn = ctz32(crm);
4842             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4843             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4844                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4845         }
4846     } else {
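             /*
              * mfcr: concatenate all eight CR fields into rD, with CR0
              * ending up in the most significant nibble.
              */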
4847         TCGv_i32 t0 = tcg_temp_new_i32();
4848         tcg_gen_mov_i32(t0, cpu_crf[0]);
4849         tcg_gen_shli_i32(t0, t0, 4);
4850         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4851         tcg_gen_shli_i32(t0, t0, 4);
4852         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4853         tcg_gen_shli_i32(t0, t0, 4);
4854         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4855         tcg_gen_shli_i32(t0, t0, 4);
4856         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4857         tcg_gen_shli_i32(t0, t0, 4);
4858         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4859         tcg_gen_shli_i32(t0, t0, 4);
4860         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4861         tcg_gen_shli_i32(t0, t0, 4);
4862         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4863         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4864         tcg_temp_free_i32(t0);
4865     }
4866 }
4867 
4868 /* mfmsr */
4869 static void gen_mfmsr(DisasContext *ctx)
4870 {
4871     CHK_SV(ctx);
4872     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4873 }
4874 
4875 /* mfspr */
4876 static inline void gen_op_mfspr(DisasContext *ctx)
4877 {
4878     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4879     uint32_t sprn = SPR(ctx->opcode);
4880 
4881 #if defined(CONFIG_USER_ONLY)
4882     read_cb = ctx->spr_cb[sprn].uea_read;
4883 #else
4884     if (ctx->pr) {
4885         read_cb = ctx->spr_cb[sprn].uea_read;
4886     } else if (ctx->hv) {
4887         read_cb = ctx->spr_cb[sprn].hea_read;
4888     } else {
4889         read_cb = ctx->spr_cb[sprn].oea_read;
4890     }
4891 #endif
4892     if (likely(read_cb != NULL)) {
4893         if (likely(read_cb != SPR_NOACCESS)) {
4894             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4895         } else {
4896             /* Privilege exception */
4897             /*
4898              * This is a hack to avoid warnings when running Linux:
4899              * this OS breaks the PowerPC virtualisation model,
4900              * allowing userland applications to read the PVR
4901              */
4902             if (sprn != SPR_PVR) {
4903                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4904                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4905                               ctx->cia);
4906             }
4907             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4908         }
4909     } else {
4910         /* ISA 2.07 defines these as no-ops */
4911         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4912             (sprn >= 808 && sprn <= 811)) {
4913             /* This is a nop */
4914             return;
4915         }
4916         /* Not defined */
4917         qemu_log_mask(LOG_GUEST_ERROR,
4918                       "Trying to read invalid spr %d (0x%03x) at "
4919                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4920 
4921         /*
4922          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
4923          * generate a priv exception, an hv emu, or a no-op
4924          */
4925         if (sprn & 0x10) {
4926             if (ctx->pr) {
4927                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4928             }
4929         } else {
4930             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4931                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4932             }
4933         }
4934     }
4935 }
4936 
4937 static void gen_mfspr(DisasContext *ctx)
4938 {
4939     gen_op_mfspr(ctx);
4940 }
4941 
4942 /* mftb */
4943 static void gen_mftb(DisasContext *ctx)
4944 {
4945     gen_op_mfspr(ctx);
4946 }
4947 
4948 /* mtcrf mtocrf */
4949 static void gen_mtcrf(DisasContext *ctx)
4950 {
4951     uint32_t crm, crn;
4952 
4953     crm = CRM(ctx->opcode);
4954     if (likely((ctx->opcode & 0x00100000))) {
4955         if (crm && ((crm & (crm - 1)) == 0)) {
4956             TCGv_i32 temp = tcg_temp_new_i32();
4957             crn = ctz32(crm);
4958             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4959             tcg_gen_shri_i32(temp, temp, crn * 4);
4960             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4961             tcg_temp_free_i32(temp);
4962         }
4963     } else {
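             /*
              * mtcrf: copy each CRM-selected 4-bit nibble of rS into the
              * corresponding CR field.
              */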
4964         TCGv_i32 temp = tcg_temp_new_i32();
4965         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4966         for (crn = 0 ; crn < 8 ; crn++) {
4967             if (crm & (1 << crn)) {
4968                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4969                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4970             }
4971         }
4972         tcg_temp_free_i32(temp);
4973     }
4974 }
4975 
4976 /* mtmsr */
4977 #if defined(TARGET_PPC64)
4978 static void gen_mtmsrd(DisasContext *ctx)
4979 {
4980     if (unlikely(!is_book3s_arch2x(ctx))) {
4981         gen_invalid(ctx);
4982         return;
4983     }
4984 
4985     CHK_SV(ctx);
4986 
4987 #if !defined(CONFIG_USER_ONLY)
4988     TCGv t0, t1;
4989     target_ulong mask;
4990 
4991     t0 = tcg_temp_new();
4992     t1 = tcg_temp_new();
4993 
4994     gen_icount_io_start(ctx);
4995 
4996     if (ctx->opcode & 0x00010000) {
4997         /* L=1 form only updates EE and RI */
4998         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4999     } else {
5000         /* mtmsrd does not alter HV, S, ME, or LE */
5001         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
5002                  (1ULL << MSR_HV));
5003         /*
5004          * XXX: we need to update nip before the store because, if we
5005          *      enter power saving mode, we will exit the loop directly
5006          *      from ppc_store_msr
5007          */
5008         gen_update_nip(ctx, ctx->base.pc_next);
5009     }
5010 
5011     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
5012     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
5013     tcg_gen_or_tl(t0, t0, t1);
5014 
5015     gen_helper_store_msr(cpu_env, t0);
5016 
5017     /* Must stop the translation as machine state (may have) changed */
5018     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5019 
5020     tcg_temp_free(t0);
5021     tcg_temp_free(t1);
5022 #endif /* !defined(CONFIG_USER_ONLY) */
5023 }
5024 #endif /* defined(TARGET_PPC64) */
5025 
5026 static void gen_mtmsr(DisasContext *ctx)
5027 {
5028     CHK_SV(ctx);
5029 
5030 #if !defined(CONFIG_USER_ONLY)
5031     TCGv t0, t1;
5032     target_ulong mask = 0xFFFFFFFF;
5033 
5034     t0 = tcg_temp_new();
5035     t1 = tcg_temp_new();
5036 
5037     gen_icount_io_start(ctx);
5038     if (ctx->opcode & 0x00010000) {
5039         /* L=1 form only updates EE and RI */
5040         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
5041     } else {
5042         /* mtmsr does not alter S, ME, or LE */
5043         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
5044 
5045         /*
5046          * XXX: we need to update nip before the store because, if we
5047          *      enter power saving mode, we will exit the loop directly
5048          *      from ppc_store_msr
5049          */
5050         gen_update_nip(ctx, ctx->base.pc_next);
5051     }
5052 
5053     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
5054     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
5055     tcg_gen_or_tl(t0, t0, t1);
5056 
5057     gen_helper_store_msr(cpu_env, t0);
5058 
5059     /* Must stop the translation as machine state (may have) changed */
5060     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5061 
5062     tcg_temp_free(t0);
5063     tcg_temp_free(t1);
5064 #endif
5065 }
5066 
5067 /* mtspr */
5068 static void gen_mtspr(DisasContext *ctx)
5069 {
5070     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
5071     uint32_t sprn = SPR(ctx->opcode);
5072 
5073 #if defined(CONFIG_USER_ONLY)
5074     write_cb = ctx->spr_cb[sprn].uea_write;
5075 #else
5076     if (ctx->pr) {
5077         write_cb = ctx->spr_cb[sprn].uea_write;
5078     } else if (ctx->hv) {
5079         write_cb = ctx->spr_cb[sprn].hea_write;
5080     } else {
5081         write_cb = ctx->spr_cb[sprn].oea_write;
5082     }
5083 #endif
5084     if (likely(write_cb != NULL)) {
5085         if (likely(write_cb != SPR_NOACCESS)) {
5086             (*write_cb)(ctx, sprn, rS(ctx->opcode));
5087         } else {
5088             /* Privilege exception */
5089             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
5090                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5091                           ctx->cia);
5092             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5093         }
5094     } else {
5095         /* ISA 2.07 defines these as no-ops */
5096         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5097             (sprn >= 808 && sprn <= 811)) {
5098             /* This is a nop */
5099             return;
5100         }
5101 
5102         /* Not defined */
5103         qemu_log_mask(LOG_GUEST_ERROR,
5104                       "Trying to write invalid spr %d (0x%03x) at "
5105                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5106 
5107 
5108         /*
5109          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
5110          * generate a priv exception, an hv emu, or a no-op
5111          */
5112         if (sprn & 0x10) {
5113             if (ctx->pr) {
5114                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5115             }
5116         } else {
5117             if (ctx->pr || sprn == 0) {
5118                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5119             }
5120         }
5121     }
5122 }
5123 
5124 #if defined(TARGET_PPC64)
5125 /* setb */
5126 static void gen_setb(DisasContext *ctx)
5127 {
5128     TCGv_i32 t0 = tcg_temp_new_i32();
5129     TCGv_i32 t8 = tcg_constant_i32(8);
5130     TCGv_i32 tm1 = tcg_constant_i32(-1);
5131     int crf = crfS(ctx->opcode);
5132 
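         /*
          * setb: CR[crfS] holds LT:GT:EQ:SO in bits 3:0, so a field value
          * >= 8 means LT is set (result -1), 4..7 means GT is set (result 1),
          * and anything else yields 0.
          */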
5133     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5134     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5135     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5136 
5137     tcg_temp_free_i32(t0);
5138 }
5139 #endif
5140 
5141 /***                         Cache management                              ***/
5142 
5143 /* dcbf */
5144 static void gen_dcbf(DisasContext *ctx)
5145 {
5146     /* XXX: specification says this is treated as a load by the MMU */
5147     TCGv t0;
5148     gen_set_access_type(ctx, ACCESS_CACHE);
5149     t0 = tcg_temp_new();
5150     gen_addr_reg_index(ctx, t0);
5151     gen_qemu_ld8u(ctx, t0, t0);
5152     tcg_temp_free(t0);
5153 }
5154 
5155 /* dcbfep (external PID dcbf) */
5156 static void gen_dcbfep(DisasContext *ctx)
5157 {
5158     /* XXX: specification says this is treated as a load by the MMU */
5159     TCGv t0;
5160     CHK_SV(ctx);
5161     gen_set_access_type(ctx, ACCESS_CACHE);
5162     t0 = tcg_temp_new();
5163     gen_addr_reg_index(ctx, t0);
5164     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5165     tcg_temp_free(t0);
5166 }
5167 
5168 /* dcbi (Supervisor only) */
5169 static void gen_dcbi(DisasContext *ctx)
5170 {
5171 #if defined(CONFIG_USER_ONLY)
5172     GEN_PRIV(ctx);
5173 #else
5174     TCGv EA, val;
5175 
5176     CHK_SV(ctx);
5177     EA = tcg_temp_new();
5178     gen_set_access_type(ctx, ACCESS_CACHE);
5179     gen_addr_reg_index(ctx, EA);
5180     val = tcg_temp_new();
5181     /* XXX: specification says this should be treated as a store by the MMU */
5182     gen_qemu_ld8u(ctx, val, EA);
5183     gen_qemu_st8(ctx, val, EA);
5184     tcg_temp_free(val);
5185     tcg_temp_free(EA);
5186 #endif /* defined(CONFIG_USER_ONLY) */
5187 }
5188 
5189 /* dcbst */
5190 static void gen_dcbst(DisasContext *ctx)
5191 {
5192     /* XXX: specification says this is treated as a load by the MMU */
5193     TCGv t0;
5194     gen_set_access_type(ctx, ACCESS_CACHE);
5195     t0 = tcg_temp_new();
5196     gen_addr_reg_index(ctx, t0);
5197     gen_qemu_ld8u(ctx, t0, t0);
5198     tcg_temp_free(t0);
5199 }
5200 
5201 /* dcbstep (dcbstep External PID version) */
5202 static void gen_dcbstep(DisasContext *ctx)
5203 {
5204     /* XXX: specification says this is treated as a load by the MMU */
5205     TCGv t0;
5206     gen_set_access_type(ctx, ACCESS_CACHE);
5207     t0 = tcg_temp_new();
5208     gen_addr_reg_index(ctx, t0);
5209     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5210     tcg_temp_free(t0);
5211 }
5212 
5213 /* dcbt */
5214 static void gen_dcbt(DisasContext *ctx)
5215 {
5216     /*
5217      * interpreted as no-op
5218      * XXX: specification says this is treated as a load by the MMU but
5219      *      does not generate any exception
5220      */
5221 }
5222 
5223 /* dcbtep */
5224 static void gen_dcbtep(DisasContext *ctx)
5225 {
5226     /*
5227      * interpreted as no-op
5228      * XXX: specification says this is treated as a load by the MMU but
5229      *      does not generate any exception
5230      */
5231 }
5232 
5233 /* dcbtst */
5234 static void gen_dcbtst(DisasContext *ctx)
5235 {
5236     /*
5237      * interpreted as no-op
5238      * XXX: specification says this is treated as a load by the MMU but
5239      *      does not generate any exception
5240      */
5241 }
5242 
5243 /* dcbtstep */
5244 static void gen_dcbtstep(DisasContext *ctx)
5245 {
5246     /*
5247      * interpreted as no-op
5248      * XXX: specification says this is treated as a load by the MMU but
5249      *      does not generate any exception
5250      */
5251 }
5252 
5253 /* dcbtls */
5254 static void gen_dcbtls(DisasContext *ctx)
5255 {
5256     /* Always fails locking the cache */
5257     TCGv t0 = tcg_temp_new();
5258     gen_load_spr(t0, SPR_Exxx_L1CSR0);
5259     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5260     gen_store_spr(SPR_Exxx_L1CSR0, t0);
5261     tcg_temp_free(t0);
5262 }
5263 
5264 /* dcbz */
5265 static void gen_dcbz(DisasContext *ctx)
5266 {
5267     TCGv tcgv_addr;
5268     TCGv_i32 tcgv_op;
5269 
5270     gen_set_access_type(ctx, ACCESS_CACHE);
5271     tcgv_addr = tcg_temp_new();
5272     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5273     gen_addr_reg_index(ctx, tcgv_addr);
5274     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5275     tcg_temp_free(tcgv_addr);
5276     tcg_temp_free_i32(tcgv_op);
5277 }
5278 
5279 /* dcbzep */
5280 static void gen_dcbzep(DisasContext *ctx)
5281 {
5282     TCGv tcgv_addr;
5283     TCGv_i32 tcgv_op;
5284 
5285     gen_set_access_type(ctx, ACCESS_CACHE);
5286     tcgv_addr = tcg_temp_new();
5287     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5288     gen_addr_reg_index(ctx, tcgv_addr);
5289     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5290     tcg_temp_free(tcgv_addr);
5291     tcg_temp_free_i32(tcgv_op);
5292 }
5293 
5294 /* dst / dstt */
5295 static void gen_dst(DisasContext *ctx)
5296 {
5297     if (rA(ctx->opcode) == 0) {
5298         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5299     } else {
5300         /* interpreted as no-op */
5301     }
5302 }
5303 
5304 /* dstst / dststt */
5305 static void gen_dstst(DisasContext *ctx)
5306 {
5307     if (rA(ctx->opcode) == 0) {
5308         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5309     } else {
5310         /* interpreted as no-op */
5311     }
5312 
5313 }
5314 
5315 /* dss / dssall */
5316 static void gen_dss(DisasContext *ctx)
5317 {
5318     /* interpreted as no-op */
5319 }
5320 
5321 /* icbi */
5322 static void gen_icbi(DisasContext *ctx)
5323 {
5324     TCGv t0;
5325     gen_set_access_type(ctx, ACCESS_CACHE);
5326     t0 = tcg_temp_new();
5327     gen_addr_reg_index(ctx, t0);
5328     gen_helper_icbi(cpu_env, t0);
5329     tcg_temp_free(t0);
5330 }
5331 
5332 /* icbiep */
5333 static void gen_icbiep(DisasContext *ctx)
5334 {
5335     TCGv t0;
5336     gen_set_access_type(ctx, ACCESS_CACHE);
5337     t0 = tcg_temp_new();
5338     gen_addr_reg_index(ctx, t0);
5339     gen_helper_icbiep(cpu_env, t0);
5340     tcg_temp_free(t0);
5341 }
5342 
5343 /* Optional: */
5344 /* dcba */
5345 static void gen_dcba(DisasContext *ctx)
5346 {
5347     /*
5348      * interpreted as no-op
5349      * XXX: specification says this is treated as a store by the MMU
5350      *      but does not generate any exception
5351      */
5352 }
5353 
5354 /***                    Segment register manipulation                      ***/
5355 /* Supervisor only: */
5356 
5357 /* mfsr */
5358 static void gen_mfsr(DisasContext *ctx)
5359 {
5360 #if defined(CONFIG_USER_ONLY)
5361     GEN_PRIV(ctx);
5362 #else
5363     TCGv t0;
5364 
5365     CHK_SV(ctx);
5366     t0 = tcg_const_tl(SR(ctx->opcode));
5367     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5368     tcg_temp_free(t0);
5369 #endif /* defined(CONFIG_USER_ONLY) */
5370 }
5371 
5372 /* mfsrin */
5373 static void gen_mfsrin(DisasContext *ctx)
5374 {
5375 #if defined(CONFIG_USER_ONLY)
5376     GEN_PRIV(ctx);
5377 #else
5378     TCGv t0;
5379 
5380     CHK_SV(ctx);
5381     t0 = tcg_temp_new();
5382     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5383     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5384     tcg_temp_free(t0);
5385 #endif /* defined(CONFIG_USER_ONLY) */
5386 }
5387 
5388 /* mtsr */
5389 static void gen_mtsr(DisasContext *ctx)
5390 {
5391 #if defined(CONFIG_USER_ONLY)
5392     GEN_PRIV(ctx);
5393 #else
5394     TCGv t0;
5395 
5396     CHK_SV(ctx);
5397     t0 = tcg_const_tl(SR(ctx->opcode));
5398     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5399     tcg_temp_free(t0);
5400 #endif /* defined(CONFIG_USER_ONLY) */
5401 }
5402 
5403 /* mtsrin */
5404 static void gen_mtsrin(DisasContext *ctx)
5405 {
5406 #if defined(CONFIG_USER_ONLY)
5407     GEN_PRIV(ctx);
5408 #else
5409     TCGv t0;
5410     CHK_SV(ctx);
5411 
5412     t0 = tcg_temp_new();
5413     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5414     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5415     tcg_temp_free(t0);
5416 #endif /* defined(CONFIG_USER_ONLY) */
5417 }
5418 
5419 #if defined(TARGET_PPC64)
5420 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5421 
5422 /* mfsr */
5423 static void gen_mfsr_64b(DisasContext *ctx)
5424 {
5425 #if defined(CONFIG_USER_ONLY)
5426     GEN_PRIV(ctx);
5427 #else
5428     TCGv t0;
5429 
5430     CHK_SV(ctx);
5431     t0 = tcg_const_tl(SR(ctx->opcode));
5432     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5433     tcg_temp_free(t0);
5434 #endif /* defined(CONFIG_USER_ONLY) */
5435 }
5436 
5437 /* mfsrin */
5438 static void gen_mfsrin_64b(DisasContext *ctx)
5439 {
5440 #if defined(CONFIG_USER_ONLY)
5441     GEN_PRIV(ctx);
5442 #else
5443     TCGv t0;
5444 
5445     CHK_SV(ctx);
5446     t0 = tcg_temp_new();
5447     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5448     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5449     tcg_temp_free(t0);
5450 #endif /* defined(CONFIG_USER_ONLY) */
5451 }
5452 
5453 /* mtsr */
5454 static void gen_mtsr_64b(DisasContext *ctx)
5455 {
5456 #if defined(CONFIG_USER_ONLY)
5457     GEN_PRIV(ctx);
5458 #else
5459     TCGv t0;
5460 
5461     CHK_SV(ctx);
5462     t0 = tcg_const_tl(SR(ctx->opcode));
5463     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5464     tcg_temp_free(t0);
5465 #endif /* defined(CONFIG_USER_ONLY) */
5466 }
5467 
5468 /* mtsrin */
5469 static void gen_mtsrin_64b(DisasContext *ctx)
5470 {
5471 #if defined(CONFIG_USER_ONLY)
5472     GEN_PRIV(ctx);
5473 #else
5474     TCGv t0;
5475 
5476     CHK_SV(ctx);
5477     t0 = tcg_temp_new();
5478     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5479     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5480     tcg_temp_free(t0);
5481 #endif /* defined(CONFIG_USER_ONLY) */
5482 }
5483 
5484 #endif /* defined(TARGET_PPC64) */
5485 
5486 /***                      Lookaside buffer management                      ***/
5487 /* Optional & supervisor only: */
5488 
5489 /* tlbia */
5490 static void gen_tlbia(DisasContext *ctx)
5491 {
5492 #if defined(CONFIG_USER_ONLY)
5493     GEN_PRIV(ctx);
5494 #else
5495     CHK_HV(ctx);
5496 
5497     gen_helper_tlbia(cpu_env);
5498 #endif  /* defined(CONFIG_USER_ONLY) */
5499 }
5500 
5501 /* tlbsync */
5502 static void gen_tlbsync(DisasContext *ctx)
5503 {
5504 #if defined(CONFIG_USER_ONLY)
5505     GEN_PRIV(ctx);
5506 #else
5507 
5508     if (ctx->gtse) {
5509         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
5510     } else {
5511         CHK_HV(ctx); /* Else hypervisor privileged */
5512     }
5513 
5514     /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
5515     if (ctx->insns_flags & PPC_BOOKE) {
5516         gen_check_tlb_flush(ctx, true);
5517     }
5518 #endif /* defined(CONFIG_USER_ONLY) */
5519 }
5520 
5521 /***                              External control                         ***/
5522 /* Optional: */
5523 
5524 /* eciwx */
5525 static void gen_eciwx(DisasContext *ctx)
5526 {
5527     TCGv t0;
5528     /* Should check EAR[E] ! */
5529     gen_set_access_type(ctx, ACCESS_EXT);
5530     t0 = tcg_temp_new();
5531     gen_addr_reg_index(ctx, t0);
5532     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5533                        DEF_MEMOP(MO_UL | MO_ALIGN));
5534     tcg_temp_free(t0);
5535 }
5536 
5537 /* ecowx */
5538 static void gen_ecowx(DisasContext *ctx)
5539 {
5540     TCGv t0;
5541     /* Should check EAR[E] ! */
5542     gen_set_access_type(ctx, ACCESS_EXT);
5543     t0 = tcg_temp_new();
5544     gen_addr_reg_index(ctx, t0);
5545     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5546                        DEF_MEMOP(MO_UL | MO_ALIGN));
5547     tcg_temp_free(t0);
5548 }
5549 
5550 /* 602 - 603 - G2 TLB management */
5551 
5552 /* tlbld */
5553 static void gen_tlbld_6xx(DisasContext *ctx)
5554 {
5555 #if defined(CONFIG_USER_ONLY)
5556     GEN_PRIV(ctx);
5557 #else
5558     CHK_SV(ctx);
5559     gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5560 #endif /* defined(CONFIG_USER_ONLY) */
5561 }
5562 
5563 /* tlbli */
5564 static void gen_tlbli_6xx(DisasContext *ctx)
5565 {
5566 #if defined(CONFIG_USER_ONLY)
5567     GEN_PRIV(ctx);
5568 #else
5569     CHK_SV(ctx);
5570     gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5571 #endif /* defined(CONFIG_USER_ONLY) */
5572 }
5573 
5574 /* BookE specific instructions */
5575 
5576 /* XXX: not implemented on 440 ? */
5577 static void gen_mfapidi(DisasContext *ctx)
5578 {
5579     /* XXX: TODO */
5580     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5581 }
5582 
5583 /* XXX: not implemented on 440 ? */
5584 static void gen_tlbiva(DisasContext *ctx)
5585 {
5586 #if defined(CONFIG_USER_ONLY)
5587     GEN_PRIV(ctx);
5588 #else
5589     TCGv t0;
5590 
5591     CHK_SV(ctx);
5592     t0 = tcg_temp_new();
5593     gen_addr_reg_index(ctx, t0);
5594     gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5595     tcg_temp_free(t0);
5596 #endif /* defined(CONFIG_USER_ONLY) */
5597 }
5598 
5599 /* All 405 MAC instructions are translated here */
5600 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5601                                         int ra, int rb, int rt, int Rc)
5602 {
5603     TCGv t0, t1;
5604 
5605     t0 = tcg_temp_local_new();
5606     t1 = tcg_temp_local_new();
5607 
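         /*
          * The low opc3 bits select which 16-bit halves of rA and rB feed
          * the multiply and whether they are sign- or zero-extended.
          */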
5608     switch (opc3 & 0x0D) {
5609     case 0x05:
5610         /* macchw    - macchw.    - macchwo   - macchwo.   */
5611         /* macchws   - macchws.   - macchwso  - macchwso.  */
5612         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5613         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5614         /* mulchw - mulchw. */
5615         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5616         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5617         tcg_gen_ext16s_tl(t1, t1);
5618         break;
5619     case 0x04:
5620         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5621         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5622         /* mulchwu - mulchwu. */
5623         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5624         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5625         tcg_gen_ext16u_tl(t1, t1);
5626         break;
5627     case 0x01:
5628         /* machhw    - machhw.    - machhwo   - machhwo.   */
5629         /* machhws   - machhws.   - machhwso  - machhwso.  */
5630         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5631         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5632         /* mulhhw - mulhhw. */
5633         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5634         tcg_gen_ext16s_tl(t0, t0);
5635         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5636         tcg_gen_ext16s_tl(t1, t1);
5637         break;
5638     case 0x00:
5639         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5640         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5641         /* mulhhwu - mulhhwu. */
5642         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5643         tcg_gen_ext16u_tl(t0, t0);
5644         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5645         tcg_gen_ext16u_tl(t1, t1);
5646         break;
5647     case 0x0D:
5648         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5649         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5650         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5651         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5652         /* mullhw - mullhw. */
5653         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5654         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5655         break;
5656     case 0x0C:
5657         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5658         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5659         /* mullhwu - mullhwu. */
5660         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5661         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5662         break;
5663     }
5664     if (opc2 & 0x04) {
5665         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5666         tcg_gen_mul_tl(t1, t0, t1);
5667         if (opc2 & 0x02) {
5668             /* nmultiply-and-accumulate (0x0E) */
5669             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5670         } else {
5671             /* multiply-and-accumulate (0x0C) */
5672             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5673         }
5674 
5675         if (opc3 & 0x12) {
5676             /* Check overflow and/or saturate */
5677             TCGLabel *l1 = gen_new_label();
5678 
5679             if (opc3 & 0x10) {
5680                 /* Start with XER OV disabled, the most likely case */
5681                 tcg_gen_movi_tl(cpu_ov, 0);
5682             }
5683             if (opc3 & 0x01) {
5684                 /* Signed */
5685                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5686                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5687                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5688                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5689                 if (opc3 & 0x02) {
5690                     /* Saturate */
5691                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5692                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5693                 }
5694             } else {
5695                 /* Unsigned */
5696                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5697                 if (opc3 & 0x02) {
5698                     /* Saturate */
5699                     tcg_gen_movi_tl(t0, UINT32_MAX);
5700                 }
5701             }
5702             if (opc3 & 0x10) {
5703                 /* Overflow occurred: set XER OV and SO */
5704                 tcg_gen_movi_tl(cpu_ov, 1);
5705                 tcg_gen_movi_tl(cpu_so, 1);
5706             }
5707             gen_set_label(l1);
5708             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5709         }
5710     } else {
5711         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5712     }
5713     tcg_temp_free(t0);
5714     tcg_temp_free(t1);
5715     if (unlikely(Rc != 0)) {
5716         /* Update Rc0 */
5717         gen_set_Rc0(ctx, cpu_gpr[rt]);
5718     }
5719 }
5720 
5721 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
5722 static void glue(gen_, name)(DisasContext *ctx)                               \
5723 {                                                                             \
5724     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
5725                          rD(ctx->opcode), Rc(ctx->opcode));                   \
5726 }
5727 
5728 /* macchw    - macchw.    */
5729 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5730 /* macchwo   - macchwo.   */
5731 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5732 /* macchws   - macchws.   */
5733 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5734 /* macchwso  - macchwso.  */
5735 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5736 /* macchwsu  - macchwsu.  */
5737 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5738 /* macchwsuo - macchwsuo. */
5739 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5740 /* macchwu   - macchwu.   */
5741 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5742 /* macchwuo  - macchwuo.  */
5743 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5744 /* machhw    - machhw.    */
5745 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5746 /* machhwo   - machhwo.   */
5747 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5748 /* machhws   - machhws.   */
5749 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5750 /* machhwso  - machhwso.  */
5751 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5752 /* machhwsu  - machhwsu.  */
5753 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5754 /* machhwsuo - machhwsuo. */
5755 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5756 /* machhwu   - machhwu.   */
5757 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5758 /* machhwuo  - machhwuo.  */
5759 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5760 /* maclhw    - maclhw.    */
5761 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5762 /* maclhwo   - maclhwo.   */
5763 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5764 /* maclhws   - maclhws.   */
5765 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5766 /* maclhwso  - maclhwso.  */
5767 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5768 /* maclhwu   - maclhwu.   */
5769 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5770 /* maclhwuo  - maclhwuo.  */
5771 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5772 /* maclhwsu  - maclhwsu.  */
5773 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5774 /* maclhwsuo - maclhwsuo. */
5775 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5776 /* nmacchw   - nmacchw.   */
5777 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5778 /* nmacchwo  - nmacchwo.  */
5779 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5780 /* nmacchws  - nmacchws.  */
5781 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5782 /* nmacchwso - nmacchwso. */
5783 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5784 /* nmachhw   - nmachhw.   */
5785 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5786 /* nmachhwo  - nmachhwo.  */
5787 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5788 /* nmachhws  - nmachhws.  */
5789 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5790 /* nmachhwso - nmachhwso. */
5791 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5792 /* nmaclhw   - nmaclhw.   */
5793 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5794 /* nmaclhwo  - nmaclhwo.  */
5795 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5796 /* nmaclhws  - nmaclhws.  */
5797 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5798 /* nmaclhwso - nmaclhwso. */
5799 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5800 
5801 /* mulchw  - mulchw.  */
5802 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5803 /* mulchwu - mulchwu. */
5804 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5805 /* mulhhw  - mulhhw.  */
5806 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5807 /* mulhhwu - mulhhwu. */
5808 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5809 /* mullhw  - mullhw.  */
5810 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5811 /* mullhwu - mullhwu. */
5812 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5813 
5814 /* mfdcr */
5815 static void gen_mfdcr(DisasContext *ctx)
5816 {
5817 #if defined(CONFIG_USER_ONLY)
5818     GEN_PRIV(ctx);
5819 #else
5820     TCGv dcrn;
5821 
5822     CHK_SV(ctx);
5823     dcrn = tcg_const_tl(SPR(ctx->opcode));
5824     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5825     tcg_temp_free(dcrn);
5826 #endif /* defined(CONFIG_USER_ONLY) */
5827 }
5828 
5829 /* mtdcr */
5830 static void gen_mtdcr(DisasContext *ctx)
5831 {
5832 #if defined(CONFIG_USER_ONLY)
5833     GEN_PRIV(ctx);
5834 #else
5835     TCGv dcrn;
5836 
5837     CHK_SV(ctx);
5838     dcrn = tcg_const_tl(SPR(ctx->opcode));
5839     gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5840     tcg_temp_free(dcrn);
5841 #endif /* defined(CONFIG_USER_ONLY) */
5842 }
5843 
5844 /* mfdcrx */
5845 /* XXX: not implemented on 440 ? */
5846 static void gen_mfdcrx(DisasContext *ctx)
5847 {
5848 #if defined(CONFIG_USER_ONLY)
5849     GEN_PRIV(ctx);
5850 #else
5851     CHK_SV(ctx);
5852     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5853                         cpu_gpr[rA(ctx->opcode)]);
5854     /* Note: setting the Rc update flag leaves Rc0 in an undefined state */
5855 #endif /* defined(CONFIG_USER_ONLY) */
5856 }
5857 
5858 /* mtdcrx */
5859 /* XXX: not implemented on 440 ? */
5860 static void gen_mtdcrx(DisasContext *ctx)
5861 {
5862 #if defined(CONFIG_USER_ONLY)
5863     GEN_PRIV(ctx);
5864 #else
5865     CHK_SV(ctx);
5866     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5867                          cpu_gpr[rS(ctx->opcode)]);
5868     /* Note: setting the Rc update flag leaves Rc0 in an undefined state */
5869 #endif /* defined(CONFIG_USER_ONLY) */
5870 }
5871 
5872 /* dccci */
5873 static void gen_dccci(DisasContext *ctx)
5874 {
5875     CHK_SV(ctx);
5876     /* interpreted as no-op */
5877 }
5878 
5879 /* dcread */
5880 static void gen_dcread(DisasContext *ctx)
5881 {
5882 #if defined(CONFIG_USER_ONLY)
5883     GEN_PRIV(ctx);
5884 #else
5885     TCGv EA, val;
5886 
5887     CHK_SV(ctx);
5888     gen_set_access_type(ctx, ACCESS_CACHE);
5889     EA = tcg_temp_new();
5890     gen_addr_reg_index(ctx, EA);
5891     val = tcg_temp_new();
5892     gen_qemu_ld32u(ctx, val, EA);
5893     tcg_temp_free(val);
5894     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5895     tcg_temp_free(EA);
5896 #endif /* defined(CONFIG_USER_ONLY) */
5897 }
5898 
5899 /* icbt */
5900 static void gen_icbt_40x(DisasContext *ctx)
5901 {
5902     /*
5903      * interpreted as no-op
5904      * XXX: specification says this is treated as a load by the MMU but
5905      *      does not generate any exception
5906      */
5907 }
5908 
5909 /* iccci */
5910 static void gen_iccci(DisasContext *ctx)
5911 {
5912     CHK_SV(ctx);
5913     /* interpreted as no-op */
5914 }
5915 
5916 /* icread */
5917 static void gen_icread(DisasContext *ctx)
5918 {
5919     CHK_SV(ctx);
5920     /* interpreted as no-op */
5921 }
5922 
5923 /* rfci (supervisor only) */
5924 static void gen_rfci_40x(DisasContext *ctx)
5925 {
5926 #if defined(CONFIG_USER_ONLY)
5927     GEN_PRIV(ctx);
5928 #else
5929     CHK_SV(ctx);
5930     /* Restore CPU state */
5931     gen_helper_40x_rfci(cpu_env);
5932     ctx->base.is_jmp = DISAS_EXIT;
5933 #endif /* defined(CONFIG_USER_ONLY) */
5934 }
5935 
5936 static void gen_rfci(DisasContext *ctx)
5937 {
5938 #if defined(CONFIG_USER_ONLY)
5939     GEN_PRIV(ctx);
5940 #else
5941     CHK_SV(ctx);
5942     /* Restore CPU state */
5943     gen_helper_rfci(cpu_env);
5944     ctx->base.is_jmp = DISAS_EXIT;
5945 #endif /* defined(CONFIG_USER_ONLY) */
5946 }
5947 
5948 /* BookE specific */
5949 
5950 /* XXX: not implemented on 440 ? */
5951 static void gen_rfdi(DisasContext *ctx)
5952 {
5953 #if defined(CONFIG_USER_ONLY)
5954     GEN_PRIV(ctx);
5955 #else
5956     CHK_SV(ctx);
5957     /* Restore CPU state */
5958     gen_helper_rfdi(cpu_env);
5959     ctx->base.is_jmp = DISAS_EXIT;
5960 #endif /* defined(CONFIG_USER_ONLY) */
5961 }
5962 
5963 /* XXX: not implemented on 440 ? */
5964 static void gen_rfmci(DisasContext *ctx)
5965 {
5966 #if defined(CONFIG_USER_ONLY)
5967     GEN_PRIV(ctx);
5968 #else
5969     CHK_SV(ctx);
5970     /* Restore CPU state */
5971     gen_helper_rfmci(cpu_env);
5972     ctx->base.is_jmp = DISAS_EXIT;
5973 #endif /* defined(CONFIG_USER_ONLY) */
5974 }
5975 
5976 /* TLB management - PowerPC 405 implementation */
5977 
5978 /* tlbre */
5979 static void gen_tlbre_40x(DisasContext *ctx)
5980 {
5981 #if defined(CONFIG_USER_ONLY)
5982     GEN_PRIV(ctx);
5983 #else
5984     CHK_SV(ctx);
5985     switch (rB(ctx->opcode)) {
5986     case 0:
5987         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5988                                 cpu_gpr[rA(ctx->opcode)]);
5989         break;
5990     case 1:
5991         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5992                                 cpu_gpr[rA(ctx->opcode)]);
5993         break;
5994     default:
5995         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5996         break;
5997     }
5998 #endif /* defined(CONFIG_USER_ONLY) */
5999 }
6000 
6001 /* tlbsx - tlbsx. */
6002 static void gen_tlbsx_40x(DisasContext *ctx)
6003 {
6004 #if defined(CONFIG_USER_ONLY)
6005     GEN_PRIV(ctx);
6006 #else
6007     TCGv t0;
6008 
6009     CHK_SV(ctx);
6010     t0 = tcg_temp_new();
6011     gen_addr_reg_index(ctx, t0);
6012     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6013     tcg_temp_free(t0);
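         /*
          * tlbsx.: CR0[SO] mirrors XER[SO] and CR0[EQ] is set when an
          * entry was found (result != -1).
          */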
6014     if (Rc(ctx->opcode)) {
6015         TCGLabel *l1 = gen_new_label();
6016         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6017         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6018         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6019         gen_set_label(l1);
6020     }
6021 #endif /* defined(CONFIG_USER_ONLY) */
6022 }
6023 
6024 /* tlbwe */
6025 static void gen_tlbwe_40x(DisasContext *ctx)
6026 {
6027 #if defined(CONFIG_USER_ONLY)
6028     GEN_PRIV(ctx);
6029 #else
6030     CHK_SV(ctx);
6031 
6032     switch (rB(ctx->opcode)) {
6033     case 0:
6034         gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
6035                                 cpu_gpr[rS(ctx->opcode)]);
6036         break;
6037     case 1:
6038         gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
6039                                 cpu_gpr[rS(ctx->opcode)]);
6040         break;
6041     default:
6042         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6043         break;
6044     }
6045 #endif /* defined(CONFIG_USER_ONLY) */
6046 }
6047 
6048 /* TLB management - PowerPC 440 implementation */
6049 
6050 /* tlbre */
6051 static void gen_tlbre_440(DisasContext *ctx)
6052 {
6053 #if defined(CONFIG_USER_ONLY)
6054     GEN_PRIV(ctx);
6055 #else
6056     CHK_SV(ctx);
6057 
6058     switch (rB(ctx->opcode)) {
6059     case 0:
6060     case 1:
6061     case 2:
6062         {
6063             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6064             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6065                                  t0, cpu_gpr[rA(ctx->opcode)]);
6066             tcg_temp_free_i32(t0);
6067         }
6068         break;
6069     default:
6070         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6071         break;
6072     }
6073 #endif /* defined(CONFIG_USER_ONLY) */
6074 }
6075 
6076 /* tlbsx - tlbsx. */
6077 static void gen_tlbsx_440(DisasContext *ctx)
6078 {
6079 #if defined(CONFIG_USER_ONLY)
6080     GEN_PRIV(ctx);
6081 #else
6082     TCGv t0;
6083 
6084     CHK_SV(ctx);
6085     t0 = tcg_temp_new();
6086     gen_addr_reg_index(ctx, t0);
6087     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6088     tcg_temp_free(t0);
6089     if (Rc(ctx->opcode)) {
6090         TCGLabel *l1 = gen_new_label();
6091         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6092         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6093         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6094         gen_set_label(l1);
6095     }
6096 #endif /* defined(CONFIG_USER_ONLY) */
6097 }
6098 
6099 /* tlbwe */
6100 static void gen_tlbwe_440(DisasContext *ctx)
6101 {
6102 #if defined(CONFIG_USER_ONLY)
6103     GEN_PRIV(ctx);
6104 #else
6105     CHK_SV(ctx);
6106     switch (rB(ctx->opcode)) {
6107     case 0:
6108     case 1:
6109     case 2:
6110         {
6111             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6112             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6113                                  cpu_gpr[rS(ctx->opcode)]);
6114             tcg_temp_free_i32(t0);
6115         }
6116         break;
6117     default:
6118         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6119         break;
6120     }
6121 #endif /* defined(CONFIG_USER_ONLY) */
6122 }
6123 
6124 /* TLB management - PowerPC BookE 2.06 implementation */
6125 
6126 /* tlbre */
6127 static void gen_tlbre_booke206(DisasContext *ctx)
6128 {
6129 #if defined(CONFIG_USER_ONLY)
6130     GEN_PRIV(ctx);
6131 #else
6132     CHK_SV(ctx);
6133     gen_helper_booke206_tlbre(cpu_env);
6134 #endif /* defined(CONFIG_USER_ONLY) */
6135 }
6136 
6137 /* tlbsx - tlbsx. */
6138 static void gen_tlbsx_booke206(DisasContext *ctx)
6139 {
6140 #if defined(CONFIG_USER_ONLY)
6141     GEN_PRIV(ctx);
6142 #else
6143     TCGv t0;
6144 
6145     CHK_SV(ctx);
6146     if (rA(ctx->opcode)) {
6147         t0 = tcg_temp_new();
6148         tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6149     } else {
6150         t0 = tcg_const_tl(0);
6151     }
6152 
6153     tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6154     gen_helper_booke206_tlbsx(cpu_env, t0);
6155     tcg_temp_free(t0);
6156 #endif /* defined(CONFIG_USER_ONLY) */
6157 }
6158 
6159 /* tlbwe */
6160 static void gen_tlbwe_booke206(DisasContext *ctx)
6161 {
6162 #if defined(CONFIG_USER_ONLY)
6163     GEN_PRIV(ctx);
6164 #else
6165     CHK_SV(ctx);
6166     gen_helper_booke206_tlbwe(cpu_env);
6167 #endif /* defined(CONFIG_USER_ONLY) */
6168 }
6169 
6170 static void gen_tlbivax_booke206(DisasContext *ctx)
6171 {
6172 #if defined(CONFIG_USER_ONLY)
6173     GEN_PRIV(ctx);
6174 #else
6175     TCGv t0;
6176 
6177     CHK_SV(ctx);
6178     t0 = tcg_temp_new();
6179     gen_addr_reg_index(ctx, t0);
6180     gen_helper_booke206_tlbivax(cpu_env, t0);
6181     tcg_temp_free(t0);
6182 #endif /* defined(CONFIG_USER_ONLY) */
6183 }
6184 
6185 static void gen_tlbilx_booke206(DisasContext *ctx)
6186 {
6187 #if defined(CONFIG_USER_ONLY)
6188     GEN_PRIV(ctx);
6189 #else
6190     TCGv t0;
6191 
6192     CHK_SV(ctx);
6193     t0 = tcg_temp_new();
6194     gen_addr_reg_index(ctx, t0);
6195 
6196     switch ((ctx->opcode >> 21) & 0x3) {
6197     case 0:
6198         gen_helper_booke206_tlbilx0(cpu_env, t0);
6199         break;
6200     case 1:
6201         gen_helper_booke206_tlbilx1(cpu_env, t0);
6202         break;
6203     case 3:
6204         gen_helper_booke206_tlbilx3(cpu_env, t0);
6205         break;
6206     default:
6207         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6208         break;
6209     }
6210 
6211     tcg_temp_free(t0);
6212 #endif /* defined(CONFIG_USER_ONLY) */
6213 }
6214 
6215 /* wrtee */
6216 static void gen_wrtee(DisasContext *ctx)
6217 {
6218 #if defined(CONFIG_USER_ONLY)
6219     GEN_PRIV(ctx);
6220 #else
6221     TCGv t0;
6222 
6223     CHK_SV(ctx);
6224     t0 = tcg_temp_new();
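         /* Copy only bit MSR_EE from rD into the MSR; other bits are kept. */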
6225     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6226     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6227     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6228     gen_ppc_maybe_interrupt(ctx);
6229     tcg_temp_free(t0);
6230     /*
6231      * Stop translation to have a chance to raise an exception if we
6232      * just set msr_ee to 1
6233      */
6234     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6235 #endif /* defined(CONFIG_USER_ONLY) */
6236 }
6237 
6238 /* wrteei */
6239 static void gen_wrteei(DisasContext *ctx)
6240 {
6241 #if defined(CONFIG_USER_ONLY)
6242     GEN_PRIV(ctx);
6243 #else
6244     CHK_SV(ctx);
6245     if (ctx->opcode & 0x00008000) {
6246         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6247         gen_ppc_maybe_interrupt(ctx);
6248         /* Stop translation to have a chance to raise an exception */
6249         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6250     } else {
6251         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6252     }
6253 #endif /* defined(CONFIG_USER_ONLY) */
6254 }
6255 
6256 /* PowerPC 440 specific instructions */
6257 
6258 /* dlmzb */
6259 static void gen_dlmzb(DisasContext *ctx)
6260 {
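         /* The Rc flag is passed so the helper can also update CR0 (dlmzb.). */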
6261     TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6262     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6263                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6264     tcg_temp_free_i32(t0);
6265 }
6266 
6267 /* mbar replaces eieio on 440 */
6268 static void gen_mbar(DisasContext *ctx)
6269 {
6270     /* interpreted as no-op */
6271 }
6272 
6273 /* msync replaces sync on 440 */
6274 static void gen_msync_4xx(DisasContext *ctx)
6275 {
6276     /* Only e500 seems to treat reserved bits as invalid */
6277     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
6278         (ctx->opcode & 0x03FFF801)) {
6279         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6280     }
6281     /* otherwise interpreted as no-op */
6282 }
6283 
6284 /* icbt */
6285 static void gen_icbt_440(DisasContext *ctx)
6286 {
6287     /*
6288      * interpreted as no-op
6289      * XXX: specification says this is treated as a load by the MMU but
6290      *      does not generate any exception
6291      */
6292 }
6293 
6294 #if defined(TARGET_PPC64)
6295 static void gen_maddld(DisasContext *ctx)
6296 {
6297     TCGv_i64 t1 = tcg_temp_new_i64();
6298 
6299     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6300     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6301     tcg_temp_free_i64(t1);
6302 }
6303 
6304 /* maddhd maddhdu */
6305 static void gen_maddhd_maddhdu(DisasContext *ctx)
6306 {
6307     TCGv_i64 lo = tcg_temp_new_i64();
6308     TCGv_i64 hi = tcg_temp_new_i64();
6309     TCGv_i64 t1 = tcg_temp_new_i64();
6310 
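         /*
          * The low opcode bit distinguishes maddhdu (unsigned multiply,
          * zero-extended addend) from maddhd (signed multiply, sign-extended
          * addend); t1 supplies the upper 64 bits of the addend.
          */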
6311     if (Rc(ctx->opcode)) {
6312         tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6313                           cpu_gpr[rB(ctx->opcode)]);
6314         tcg_gen_movi_i64(t1, 0);
6315     } else {
6316         tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6317                           cpu_gpr[rB(ctx->opcode)]);
6318         tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6319     }
6320     tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6321                      cpu_gpr[rC(ctx->opcode)], t1);
6322     tcg_temp_free_i64(lo);
6323     tcg_temp_free_i64(hi);
6324     tcg_temp_free_i64(t1);
6325 }
6326 #endif /* defined(TARGET_PPC64) */
6327 
6328 static void gen_tbegin(DisasContext *ctx)
6329 {
6330     if (unlikely(!ctx->tm_enabled)) {
6331         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6332         return;
6333     }
6334     gen_helper_tbegin(cpu_env);
6335 }
6336 
6337 #define GEN_TM_NOOP(name)                                      \
6338 static inline void gen_##name(DisasContext *ctx)               \
6339 {                                                              \
6340     if (unlikely(!ctx->tm_enabled)) {                          \
6341         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6342         return;                                                \
6343     }                                                          \
6344     /*                                                         \
6345      * Because tbegin always fails in QEMU, these user         \
6346      * space instructions all have a simple implementation:    \
6347      *                                                         \
6348      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
6349      *           = 0b0 || 0b00    || 0b0                       \
6350      */                                                        \
6351     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6352 }
6353 
6354 GEN_TM_NOOP(tend);
6355 GEN_TM_NOOP(tabort);
6356 GEN_TM_NOOP(tabortwc);
6357 GEN_TM_NOOP(tabortwci);
6358 GEN_TM_NOOP(tabortdc);
6359 GEN_TM_NOOP(tabortdci);
6360 GEN_TM_NOOP(tsr);
6361 
6362 static inline void gen_cp_abort(DisasContext *ctx)
6363 {
6364     /* Do Nothing */
6365 }
6366 
6367 #define GEN_CP_PASTE_NOOP(name)                           \
6368 static inline void gen_##name(DisasContext *ctx)          \
6369 {                                                         \
6370     /*                                                    \
6371      * Generate invalid exception until we have an        \
6372      * implementation of the copy paste facility          \
6373      */                                                   \
6374     gen_invalid(ctx);                                     \
6375 }
6376 
6377 GEN_CP_PASTE_NOOP(copy)
6378 GEN_CP_PASTE_NOOP(paste)
6379 
6380 static void gen_tcheck(DisasContext *ctx)
6381 {
6382     if (unlikely(!ctx->tm_enabled)) {
6383         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6384         return;
6385     }
6386     /*
6387      * Because tbegin always fails, the tcheck implementation is
6388      * simple:
6389      *
6390      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6391      *         = 0b1 || 0b00 || 0b0
6392      */
6393     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6394 }
6395 
6396 #if defined(CONFIG_USER_ONLY)
6397 #define GEN_TM_PRIV_NOOP(name)                                 \
6398 static inline void gen_##name(DisasContext *ctx)               \
6399 {                                                              \
6400     gen_priv_opc(ctx);                                         \
6401 }
6402 
6403 #else
6404 
6405 #define GEN_TM_PRIV_NOOP(name)                                 \
6406 static inline void gen_##name(DisasContext *ctx)               \
6407 {                                                              \
6408     CHK_SV(ctx);                                               \
6409     if (unlikely(!ctx->tm_enabled)) {                          \
6410         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6411         return;                                                \
6412     }                                                          \
6413     /*                                                         \
6414      * Because tbegin always fails, the implementation is      \
6415      * simple:                                                 \
6416      *                                                         \
6417      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
6418      *         = 0b0 || 0b00 || 0b0                            \
6419      */                                                        \
6420     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6421 }
6422 
6423 #endif
6424 
6425 GEN_TM_PRIV_NOOP(treclaim);
6426 GEN_TM_PRIV_NOOP(trechkpt);
6427 
6428 static inline void get_fpr(TCGv_i64 dst, int regno)
6429 {
6430     tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
6431 }
6432 
6433 static inline void set_fpr(int regno, TCGv_i64 src)
6434 {
6435     tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
6436     /*
6437      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
6438      * corresponding to the target FPR was undefined. However,
6439      * most (if not all) real hardware was setting the result to 0.
6440      * Starting at ISA v3.1, the result for doubleword 1 is now defined
6441      * to be 0.
6442      */
6443     tcg_gen_st_i64(tcg_constant_i64(0), cpu_env, vsr64_offset(regno, false));
6444 }
6445 
6446 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
6447 {
6448     tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
6449 }
6450 
6451 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
6452 {
6453     tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
6454 }
6455 
6456 /*
6457  * Helpers for decodetree used by !function for decoding arguments.
6458  */
6459 static int times_2(DisasContext *ctx, int x)
6460 {
6461     return x * 2;
6462 }
6463 
6464 static int times_4(DisasContext *ctx, int x)
6465 {
6466     return x * 4;
6467 }
6468 
6469 static int times_16(DisasContext *ctx, int x)
6470 {
6471     return x * 16;
6472 }
6473 
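     /*
      * Deposit the 6-bit x at bit 3 of -512: the result is -512 + 8 * x,
      * a doubleword-aligned displacement in the range [-512, -8].
      */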
6474 static int64_t dw_compose_ea(DisasContext *ctx, int x)
6475 {
6476     return deposit64(0xfffffffffffffe00, 3, 6, x);
6477 }
6478 
6479 /*
6480  * Helpers for trans_* functions to check for specific insns flags.
6481  * Use token pasting to ensure that we use the proper flag with the
6482  * proper variable.
6483  */
6484 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
6485     do {                                                \
6486         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
6487             return false;                               \
6488         }                                               \
6489     } while (0)
6490 
6491 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
6492     do {                                                \
6493         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
6494             return false;                               \
6495         }                                               \
6496     } while (0)
6497 
6498 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
6499 #if TARGET_LONG_BITS == 32
6500 # define REQUIRE_64BIT(CTX)  return false
6501 #else
6502 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
6503 #endif
6504 
6505 #define REQUIRE_VECTOR(CTX)                             \
6506     do {                                                \
6507         if (unlikely(!(CTX)->altivec_enabled)) {        \
6508             gen_exception((CTX), POWERPC_EXCP_VPU);     \
6509             return true;                                \
6510         }                                               \
6511     } while (0)
6512 
6513 #define REQUIRE_VSX(CTX)                                \
6514     do {                                                \
6515         if (unlikely(!(CTX)->vsx_enabled)) {            \
6516             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
6517             return true;                                \
6518         }                                               \
6519     } while (0)
6520 
6521 #define REQUIRE_FPU(ctx)                                \
6522     do {                                                \
6523         if (unlikely(!(ctx)->fpu_enabled)) {            \
6524             gen_exception((ctx), POWERPC_EXCP_FPU);     \
6525             return true;                                \
6526         }                                               \
6527     } while (0)
6528 
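/*
 * Privilege checks: REQUIRE_SV rejects problem state, and REQUIRE_HV
 * additionally requires hypervisor state; user-only builds always raise
 * a privileged-instruction exception.
 */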
6529 #if !defined(CONFIG_USER_ONLY)
6530 #define REQUIRE_SV(CTX)             \
6531     do {                            \
6532         if (unlikely((CTX)->pr)) {  \
6533             gen_priv_opc(CTX);      \
6534             return true;            \
6535         }                           \
6536     } while (0)
6537 
6538 #define REQUIRE_HV(CTX)                             \
6539     do {                                            \
6540         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
6541             gen_priv_opc(CTX);                      \
6542             return true;                            \
6543         }                                           \
6544     } while (0)
6545 #else
6546 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6547 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6548 #endif
6549 
6550 /*
6551  * Helpers for implementing sets of trans_* functions.
6552  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6553  */
6554 #define TRANS(NAME, FUNC, ...) \
6555     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6556     { return FUNC(ctx, a, __VA_ARGS__); }
6557 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
6558     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6559     {                                                          \
6560         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
6561         return FUNC(ctx, a, __VA_ARGS__);                      \
6562     }
6563 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6564     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6565     {                                                          \
6566         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6567         return FUNC(ctx, a, __VA_ARGS__);                      \
6568     }
6569 
6570 #define TRANS64(NAME, FUNC, ...) \
6571     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6572     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
6573 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6574     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6575     {                                                          \
6576         REQUIRE_64BIT(ctx);                                    \
6577         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6578         return FUNC(ctx, a, __VA_ARGS__);                      \
6579     }
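
/*
 * For example (names illustrative), TRANS(FOO, do_foo, true) expands to:
 *   static bool trans_FOO(DisasContext *ctx, arg_FOO *a)
 *   { return do_foo(ctx, a, true); }
 */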
6580 
6581 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6582 
6583 
6584 #include "decode-insn32.c.inc"
6585 #include "decode-insn64.c.inc"
6586 #include "power8-pmu-regs.c.inc"
6587 
6588 /*
6589  * Incorporate CIA into the constant when R=1.
6590  * Validate that when R=1, RA=0.
6591  */
6592 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6593 {
6594     d->rt = a->rt;
6595     d->ra = a->ra;
6596     d->si = a->si;
6597     if (a->r) {
6598         if (unlikely(a->ra != 0)) {
6599             gen_invalid(ctx);
6600             return false;
6601         }
6602         d->si += ctx->cia;
6603     }
6604     return true;
6605 }
6606 
6607 #include "translate/fixedpoint-impl.c.inc"
6608 
6609 #include "translate/fp-impl.c.inc"
6610 
6611 #include "translate/vmx-impl.c.inc"
6612 
6613 #include "translate/vsx-impl.c.inc"
6614 
6615 #include "translate/dfp-impl.c.inc"
6616 
6617 #include "translate/spe-impl.c.inc"
6618 
6619 #include "translate/branch-impl.c.inc"
6620 
6621 #include "translate/processor-ctrl-impl.c.inc"
6622 
6623 #include "translate/storage-ctrl-impl.c.inc"
6624 
6625 /* Handles lfdp */
6626 static void gen_dform39(DisasContext *ctx)
6627 {
6628     if ((ctx->opcode & 0x3) == 0) {
6629         if (ctx->insns_flags2 & PPC2_ISA205) {
6630             return gen_lfdp(ctx);
6631         }
6632     }
6633     return gen_invalid(ctx);
6634 }
6635 
6636 /* Handles stfdp */
6637 static void gen_dform3D(DisasContext *ctx)
6638 {
6639     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6640         /* stfdp */
6641         if (ctx->insns_flags2 & PPC2_ISA205) {
6642             return gen_stfdp(ctx);
6643         }
6644     }
6645     return gen_invalid(ctx);
6646 }
6647 
6648 #if defined(TARGET_PPC64)
/* brd: byte-reverse doubleword */
6650 static void gen_brd(DisasContext *ctx)
6651 {
6652     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6653 }
6654 
/* brw: byte-reverse each word of the doubleword */
6656 static void gen_brw(DisasContext *ctx)
6657 {
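    /*
     * Byte-reverse all eight bytes, then rotate by 32 so that each word
     * ends up byte-reversed in place.
     */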
    tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
}
6662 
/* brh: byte-reverse each halfword */
6664 static void gen_brh(DisasContext *ctx)
6665 {
6666     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6667     TCGv_i64 t1 = tcg_temp_new_i64();
6668     TCGv_i64 t2 = tcg_temp_new_i64();
6669 
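    /*
     * Swap the two bytes of each halfword: t2 gets the high bytes moved
     * down, t1 gets the low bytes moved up, then merge.
     */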
6670     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6671     tcg_gen_and_i64(t2, t1, mask);
6672     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6673     tcg_gen_shli_i64(t1, t1, 8);
6674     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6675 
6676     tcg_temp_free_i64(t1);
6677     tcg_temp_free_i64(t2);
6678 }
6679 #endif
6680 
6681 static opcode_t opcodes[] = {
6682 #if defined(TARGET_PPC64)
6683 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6684 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6685 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6686 #endif
6687 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6688 #if defined(TARGET_PPC64)
6689 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6690 #endif
6691 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6692 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6693 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6694 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6695 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6696 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6697 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6698 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6699 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6700 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6701 #if defined(TARGET_PPC64)
6702 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6703 #endif
6704 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6705 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6706 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6707 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6708 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6709 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6710 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6711 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6712 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6713 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6714 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6715 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6716 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6717 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6718 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6719 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6720 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6721 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6722 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6723 #if defined(TARGET_PPC64)
6724 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6725 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6726 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6727 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6728 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6729 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6730 #endif
6731 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6732 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6733 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6734 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6735 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6736 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6737 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6738 #if defined(TARGET_PPC64)
6739 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6740 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6741 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6742 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6743 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6744 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6745                PPC_NONE, PPC2_ISA300),
6746 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6747                PPC_NONE, PPC2_ISA300),
6748 #endif
6749 /* handles lfdp, lxsd, lxssp */
6750 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6751 /* handles stfdp, stxsd, stxssp */
6752 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6753 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6754 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6755 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6756 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6757 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6758 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6759 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6760 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6761 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6762 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6763 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6764 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6765 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6766 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6767 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6768 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6769 #if defined(TARGET_PPC64)
6770 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6771 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6772 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6773 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6774 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6775 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6776 #endif
6777 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6778 /* ISA v3.0 changed the extended opcode from 62 to 30 */
6779 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
6780 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
6781 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6782 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6783 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6784 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6785 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6786 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6787 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6788 #if defined(TARGET_PPC64)
6789 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6790 #if !defined(CONFIG_USER_ONLY)
6791 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6792 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6793 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6794 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6795 #endif
6796 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6797 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6798 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6799 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6800 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6801 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6802 #endif
6803 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6804 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6805 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
6806 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6807 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6808 #if defined(TARGET_PPC64)
6809 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6810 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6811 #endif
6812 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6813 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6814 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6815 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6816 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6817 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6818 #if defined(TARGET_PPC64)
6819 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6820 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6821 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6822 #endif
6823 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6824 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6825 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6826 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6827 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6828 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6829 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6830 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6831 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6832 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6833 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6834 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6835 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6836 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6837 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6838 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6839 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6840 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6841 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6842 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6843 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6844 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6845 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6846 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6847 #if defined(TARGET_PPC64)
6848 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6849 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6850              PPC_SEGMENT_64B),
6851 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6852 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6853              PPC_SEGMENT_64B),
6854 #endif
6855 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6856 /*
6857  * XXX Those instructions will need to be handled differently for
6858  * different ISA versions
6859  */
6860 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6861 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6862 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6863 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6864 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6865 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6866 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6867 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6868 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6869 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6870 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6871 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6872 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6873 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6874 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6875 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6876 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6877 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6878 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6879 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6880 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6881 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6882 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6883 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6884 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6885 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6886 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6887                PPC_NONE, PPC2_BOOKE206),
6888 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6889                PPC_NONE, PPC2_BOOKE206),
6890 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6891                PPC_NONE, PPC2_BOOKE206),
6892 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6893                PPC_NONE, PPC2_BOOKE206),
6894 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6895                PPC_NONE, PPC2_BOOKE206),
6896 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6897 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6898 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6899 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6900               PPC_BOOKE, PPC2_BOOKE206),
6901 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6902 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6903                PPC_BOOKE, PPC2_BOOKE206),
6904 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6905              PPC_440_SPEC),
6906 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6907 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6908 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6909 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6910 #if defined(TARGET_PPC64)
6911 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6912               PPC2_ISA300),
6913 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6914 #endif
6915 
6916 #undef GEN_INT_ARITH_ADD
6917 #undef GEN_INT_ARITH_ADD_CONST
6918 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6919 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6920 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6921                                 add_ca, compute_ca, compute_ov)               \
6922 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6923 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6924 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6925 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6926 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6927 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6928 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6929 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6930 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6931 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
6932 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6933 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
6934 
6935 #undef GEN_INT_ARITH_DIVW
6936 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
6937 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6938 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6939 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6940 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6941 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6942 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6943 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6944 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6945 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6946 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6947 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6948 
6949 #if defined(TARGET_PPC64)
6950 #undef GEN_INT_ARITH_DIVD
6951 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
6952 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6953 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6954 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6955 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6956 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6957 
6958 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6959 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6960 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6961 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6962 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6963 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6964 
6965 #undef GEN_INT_ARITH_MUL_HELPER
6966 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
6967 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6968 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6969 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6970 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6971 #endif
6972 
6973 #undef GEN_INT_ARITH_SUBF
6974 #undef GEN_INT_ARITH_SUBF_CONST
6975 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
6976 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6977 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
6978                                 add_ca, compute_ca, compute_ov)               \
6979 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6980 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6981 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6982 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6983 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6984 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6985 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6986 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6987 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6988 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6989 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6990 
6991 #undef GEN_LOGICAL1
6992 #undef GEN_LOGICAL2
6993 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
6994 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6995 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
6996 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6997 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6998 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6999 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
7000 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
7001 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
7002 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
7003 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
7004 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
7005 #if defined(TARGET_PPC64)
7006 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
7007 #endif
7008 
7009 #if defined(TARGET_PPC64)
7010 #undef GEN_PPC64_R2
7011 #undef GEN_PPC64_R4
7012 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
7013 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7014 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7015              PPC_64B)
7016 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
7017 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7018 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
7019              PPC_64B),                                                        \
7020 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7021              PPC_64B),                                                        \
7022 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
7023              PPC_64B)
7024 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
7025 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
7026 GEN_PPC64_R4(rldic, 0x1E, 0x04),
7027 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
7028 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
7029 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
7030 #endif
7031 
7032 #undef GEN_LDX_E
7033 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
7034 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
7035 
7036 #if defined(TARGET_PPC64)
7037 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
7038 
7039 /* HV/P7 and later only */
7040 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
7041 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
7042 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
7043 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
7044 #endif
7045 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
7046 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
7047 
7048 /* External PID based load */
7049 #undef GEN_LDEPX
7050 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
7051 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7052               0x00000001, PPC_NONE, PPC2_BOOKE206),
7053 
7054 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
7055 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
7056 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
7057 #if defined(TARGET_PPC64)
7058 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
7059 #endif
7060 
7061 #undef GEN_STX_E
7062 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
7063 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
7064 
7065 #if defined(TARGET_PPC64)
7066 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
7067 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
7068 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
7069 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
7070 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
7071 #endif
7072 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
7073 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
7074 
7075 #undef GEN_STEPX
7076 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
7077 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7078               0x00000001, PPC_NONE, PPC2_BOOKE206),
7079 
7080 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
7081 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
7082 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
7083 #if defined(TARGET_PPC64)
7084 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
7085 #endif
7086 
7087 #undef GEN_CRLOGIC
7088 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
7089 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
7090 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
7091 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
7092 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
7093 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
7094 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
7095 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
7096 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
7097 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
7098 
7099 #undef GEN_MAC_HANDLER
7100 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
7101 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
7102 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
7103 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
7104 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
7105 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
7106 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
7107 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
7108 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
7109 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
7110 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
7111 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
7112 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
7113 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
7114 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
7115 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
7116 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
7117 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
7118 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
7119 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
7120 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
7121 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
7122 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
7123 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
7124 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
7125 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
7126 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
7127 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
7128 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
7129 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
7130 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
7131 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
7132 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
7133 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
7134 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
7135 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
7136 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
7137 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
7138 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
7139 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
7140 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
7141 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
7142 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
7143 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
7144 
7145 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
7146                PPC_NONE, PPC2_TM),
7147 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
7148                PPC_NONE, PPC2_TM),
7149 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
7150                PPC_NONE, PPC2_TM),
7151 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
7152                PPC_NONE, PPC2_TM),
7153 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
7154                PPC_NONE, PPC2_TM),
7155 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
7156                PPC_NONE, PPC2_TM),
7157 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
7158                PPC_NONE, PPC2_TM),
7159 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
7160                PPC_NONE, PPC2_TM),
7161 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
7162                PPC_NONE, PPC2_TM),
7163 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
7164                PPC_NONE, PPC2_TM),
7165 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
7166                PPC_NONE, PPC2_TM),
7167 
7168 #include "translate/fp-ops.c.inc"
7169 
7170 #include "translate/vmx-ops.c.inc"
7171 
7172 #include "translate/vsx-ops.c.inc"
7173 
7174 #include "translate/spe-ops.c.inc"
7175 };
7176 
7177 /*****************************************************************************/
7178 /* Opcode types */
7179 enum {
7180     PPC_DIRECT   = 0, /* Opcode routine        */
7181     PPC_INDIRECT = 1, /* Indirect opcode table */
7182 };
7183 
7184 #define PPC_OPCODE_MASK 0x3
7185 
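/*
 * Indirect tables are tagged by setting the low bits (PPC_INDIRECT) of
 * their suitably aligned pointer; is_indirect_opcode() tests the tag and
 * ind_table() strips it to recover the real table pointer.
 */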
7186 static inline int is_indirect_opcode(void *handler)
7187 {
7188     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
7189 }
7190 
7191 static inline opc_handler_t **ind_table(void *handler)
7192 {
7193     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
7194 }
7195 
/* Opcode table creation */
7198 static void fill_new_table(opc_handler_t **table, int len)
7199 {
7200     int i;
7201 
7202     for (i = 0; i < len; i++) {
7203         table[i] = &invalid_handler;
7204     }
7205 }
7206 
7207 static int create_new_table(opc_handler_t **table, unsigned char idx)
7208 {
7209     opc_handler_t **tmp;
7210 
7211     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
7212     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
7213     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
7214 
7215     return 0;
7216 }
7217 
7218 static int insert_in_table(opc_handler_t **table, unsigned char idx,
7219                             opc_handler_t *handler)
7220 {
7221     if (table[idx] != &invalid_handler) {
7222         return -1;
7223     }
7224     table[idx] = handler;
7225 
7226     return 0;
7227 }
7228 
7229 static int register_direct_insn(opc_handler_t **ppc_opcodes,
7230                                 unsigned char idx, opc_handler_t *handler)
7231 {
7232     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
7233         printf("*** ERROR: opcode %02x already assigned in main "
7234                "opcode table\n", idx);
7235         return -1;
7236     }
7237 
7238     return 0;
7239 }
7240 
7241 static int register_ind_in_table(opc_handler_t **table,
7242                                  unsigned char idx1, unsigned char idx2,
7243                                  opc_handler_t *handler)
7244 {
7245     if (table[idx1] == &invalid_handler) {
7246         if (create_new_table(table, idx1) < 0) {
7247             printf("*** ERROR: unable to create indirect table "
7248                    "idx=%02x\n", idx1);
7249             return -1;
7250         }
7251     } else {
7252         if (!is_indirect_opcode(table[idx1])) {
7253             printf("*** ERROR: idx %02x already assigned to a direct "
7254                    "opcode\n", idx1);
7255             return -1;
7256         }
7257     }
7258     if (handler != NULL &&
7259         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
7260         printf("*** ERROR: opcode %02x already assigned in "
7261                "opcode table %02x\n", idx2, idx1);
7262         return -1;
7263     }
7264 
7265     return 0;
7266 }
7267 
7268 static int register_ind_insn(opc_handler_t **ppc_opcodes,
7269                              unsigned char idx1, unsigned char idx2,
7270                              opc_handler_t *handler)
7271 {
7272     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
7273 }
7274 
7275 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
7276                                 unsigned char idx1, unsigned char idx2,
7277                                 unsigned char idx3, opc_handler_t *handler)
7278 {
7279     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7280         printf("*** ERROR: unable to join indirect table idx "
7281                "[%02x-%02x]\n", idx1, idx2);
7282         return -1;
7283     }
7284     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
7285                               handler) < 0) {
7286         printf("*** ERROR: unable to insert opcode "
7287                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7288         return -1;
7289     }
7290 
7291     return 0;
7292 }
7293 
7294 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
7295                                  unsigned char idx1, unsigned char idx2,
7296                                  unsigned char idx3, unsigned char idx4,
7297                                  opc_handler_t *handler)
7298 {
7299     opc_handler_t **table;
7300 
7301     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7302         printf("*** ERROR: unable to join indirect table idx "
7303                "[%02x-%02x]\n", idx1, idx2);
7304         return -1;
7305     }
7306     table = ind_table(ppc_opcodes[idx1]);
7307     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
7308         printf("*** ERROR: unable to join 2nd-level indirect table idx "
7309                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7310         return -1;
7311     }
7312     table = ind_table(table[idx2]);
7313     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
7314         printf("*** ERROR: unable to insert opcode "
7315                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
7316         return -1;
7317     }
7318     return 0;
7319 }
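
/*
 * Register INSN in the dispatch tables, descending through up to three
 * levels of indirection depending on which of opc2/opc3/opc4 are in use
 * (0xFF marks an unused level).
 */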
7320 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7321 {
7322     if (insn->opc2 != 0xFF) {
7323         if (insn->opc3 != 0xFF) {
7324             if (insn->opc4 != 0xFF) {
7325                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7326                                           insn->opc3, insn->opc4,
7327                                           &insn->handler) < 0) {
7328                     return -1;
7329                 }
7330             } else {
7331                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7332                                          insn->opc3, &insn->handler) < 0) {
7333                     return -1;
7334                 }
7335             }
7336         } else {
7337             if (register_ind_insn(ppc_opcodes, insn->opc1,
7338                                   insn->opc2, &insn->handler) < 0) {
7339                 return -1;
7340             }
7341         }
7342     } else {
7343         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7344             return -1;
7345         }
7346     }
7347 
7348     return 0;
7349 }
7350 
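/*
 * Count the valid entries in TABLE, recursing into indirect sub-tables;
 * empty sub-tables are freed and replaced by &invalid_handler.
 */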
7351 static int test_opcode_table(opc_handler_t **table, int len)
7352 {
7353     int i, count, tmp;
7354 
7355     for (i = 0, count = 0; i < len; i++) {
7356         /* Consistency fixup */
7357         if (table[i] == NULL) {
7358             table[i] = &invalid_handler;
7359         }
7360         if (table[i] != &invalid_handler) {
7361             if (is_indirect_opcode(table[i])) {
7362                 tmp = test_opcode_table(ind_table(table[i]),
7363                     PPC_CPU_INDIRECT_OPCODES_LEN);
7364                 if (tmp == 0) {
                    /* Strip the indirect tag before freeing the sub-table. */
                    g_free(ind_table(table[i]));
7366                     table[i] = &invalid_handler;
7367                 } else {
7368                     count++;
7369                 }
7370             } else {
7371                 count++;
7372             }
7373         }
7374     }
7375 
7376     return count;
7377 }
7378 
7379 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7380 {
7381     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
        printf("*** WARNING: no opcode defined!\n");
7383     }
7384 }
7385 
7386 /*****************************************************************************/
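/*
 * Build this CPU's dispatch tables from the static opcodes[] list,
 * registering only handlers whose type/type2 flags match the CPU.
 */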
7387 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7388 {
7389     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7390     opcode_t *opc;
7391 
7392     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7393     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7394         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7395             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7396             if (register_insn(cpu->opcodes, opc) < 0) {
7397                 error_setg(errp, "ERROR initializing PowerPC instruction "
7398                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7399                            opc->opc3);
7400                 return;
7401             }
7402         }
7403     }
7404     fix_opcode_tables(cpu->opcodes);
7405     fflush(stdout);
7406     fflush(stderr);
7407 }
7408 
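/* Free every dynamically allocated indirect table, up to three levels deep. */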
7409 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7410 {
7411     opc_handler_t **table, **table_2;
7412     int i, j, k;
7413 
7414     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7415         if (cpu->opcodes[i] == &invalid_handler) {
7416             continue;
7417         }
7418         if (is_indirect_opcode(cpu->opcodes[i])) {
7419             table = ind_table(cpu->opcodes[i]);
7420             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7421                 if (table[j] == &invalid_handler) {
7422                     continue;
7423                 }
7424                 if (is_indirect_opcode(table[j])) {
7425                     table_2 = ind_table(table[j]);
7426                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7427                         if (table_2[k] != &invalid_handler &&
7428                             is_indirect_opcode(table_2[k])) {
7429                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7430                                                      ~PPC_INDIRECT));
7431                         }
7432                     }
7433                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7434                                              ~PPC_INDIRECT));
7435                 }
7436             }
7437             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7438                 ~PPC_INDIRECT));
7439         }
7440     }
7441 }
7442 
7443 int ppc_fixup_cpu(PowerPCCPU *cpu)
7444 {
7445     CPUPPCState *env = &cpu->env;
7446 
7447     /*
7448      * TCG doesn't (yet) emulate some groups of instructions that are
7449      * implemented on some otherwise supported CPUs (e.g. VSX and
7450      * decimal floating point instructions on POWER7).  We remove
7451      * unsupported instruction groups from the cpu state's instruction
7452      * masks and hope the guest can cope.  For at least the pseries
7453      * machine, the unavailability of these instructions can be
7454      * advertised to the guest via the device tree.
7455      */
7456     if ((env->insns_flags & ~PPC_TCG_INSNS)
7457         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7458         warn_report("Disabling some instructions which are not "
7459                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7460                     env->insns_flags & ~PPC_TCG_INSNS,
7461                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7462     }
7463     env->insns_flags &= PPC_TCG_INSNS;
7464     env->insns_flags2 &= PPC_TCG_INSNS2;
7465     return 0;
7466 }
7467 
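/*
 * Legacy (non-decodetree) decoder: walk up to three levels of indirect
 * tables keyed on opc1..opc4, reject any set "invalid" bits, then call
 * the handler.
 */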
7468 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
7469 {
7470     opc_handler_t **table, *handler;
7471     uint32_t inval;
7472 
7473     ctx->opcode = insn;
7474 
7475     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7476               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7477               ctx->le_mode ? "little" : "big");
7478 
7479     table = cpu->opcodes;
7480     handler = table[opc1(insn)];
7481     if (is_indirect_opcode(handler)) {
7482         table = ind_table(handler);
7483         handler = table[opc2(insn)];
7484         if (is_indirect_opcode(handler)) {
7485             table = ind_table(handler);
7486             handler = table[opc3(insn)];
7487             if (is_indirect_opcode(handler)) {
7488                 table = ind_table(handler);
7489                 handler = table[opc4(insn)];
7490             }
7491         }
7492     }
7493 
    /* Is the opcode *REALLY* valid? */
7495     if (unlikely(handler->handler == &gen_invalid)) {
7496         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7497                       "%02x - %02x - %02x - %02x (%08x) "
7498                       TARGET_FMT_lx "\n",
7499                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7500                       insn, ctx->cia);
7501         return false;
7502     }
7503 
7504     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7505                  && Rc(insn))) {
7506         inval = handler->inval2;
7507     } else {
7508         inval = handler->inval1;
7509     }
7510 
7511     if (unlikely((insn & inval) != 0)) {
7512         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7513                       "%02x - %02x - %02x - %02x (%08x) "
7514                       TARGET_FMT_lx "\n", insn & inval,
7515                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7516                       insn, ctx->cia);
7517         return false;
7518     }
7519 
7520     handler->handler(ctx);
7521     return true;
7522 }
7523 
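/* Unpack the per-TB hflags into the fields consulted during translation. */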
7524 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
7525 {
7526     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7527     CPUPPCState *env = cs->env_ptr;
7528     uint32_t hflags = ctx->base.tb->flags;
7529 
7530     ctx->spr_cb = env->spr_cb;
7531     ctx->pr = (hflags >> HFLAGS_PR) & 1;
7532     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
7533     ctx->dr = (hflags >> HFLAGS_DR) & 1;
7534     ctx->hv = (hflags >> HFLAGS_HV) & 1;
7535     ctx->insns_flags = env->insns_flags;
7536     ctx->insns_flags2 = env->insns_flags2;
7537     ctx->access_type = -1;
7538     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
7539     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
7540     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
7541     ctx->flags = env->flags;
7542 #if defined(TARGET_PPC64)
7543     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
7544     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7545 #endif
7546     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
7547         || env->mmu_model & POWERPC_MMU_64;
7548 
7549     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
7550     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
7551     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
7552     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
7553     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
7554     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
7555     ctx->hr = (hflags >> HFLAGS_HR) & 1;
7556     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
7557     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
7558     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
7559     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
7560     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
7561 
7562     ctx->singlestep_enabled = 0;
7563     if ((hflags >> HFLAGS_SE) & 1) {
7564         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
7565         ctx->base.max_insns = 1;
7566     }
7567     if ((hflags >> HFLAGS_BE) & 1) {
7568         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
7569     }
7570 }
7571 
7572 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
7573 {
7574 }
7575 
7576 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
7577 {
7578     tcg_gen_insn_start(dcbase->pc_next);
7579 }
7580 
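/*
 * Prefixed (8-byte) instructions were introduced in ISA v3.1 and are
 * identified by primary opcode 1 in the first word.
 */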
7581 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
7582 {
7583     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
7584     return opc1(insn) == 1;
7585 }
7586 
7587 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
7588 {
7589     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7590     PowerPCCPU *cpu = POWERPC_CPU(cs);
7591     CPUPPCState *env = cs->env_ptr;
7592     target_ulong pc;
7593     uint32_t insn;
7594     bool ok;
7595 
7596     LOG_DISAS("----------------\n");
7597     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7598               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
7599 
7600     ctx->cia = pc = ctx->base.pc_next;
7601     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
7602     ctx->base.pc_next = pc += 4;
7603 
7604     if (!is_prefix_insn(ctx, insn)) {
7605         ok = (decode_insn32(ctx, insn) ||
7606               decode_legacy(cpu, ctx, insn));
7607     } else if ((pc & 63) == 0) {
        /*
         * PowerISA v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
7613         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
7614         ok = true;
7615     } else {
7616         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
7617                                              need_byteswap(ctx));
7618         ctx->base.pc_next = pc += 4;
7619         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
7620     }
7621     if (!ok) {
7622         gen_invalid(ctx);
7623     }
7624 
7625     /* End the TB when crossing a page boundary. */
7626     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
7627         ctx->base.is_jmp = DISAS_TOO_MANY;
7628     }
7629 
7630     translator_loop_temp_check(&ctx->base);
7631 }
7632 
7633 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7634 {
7635     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7636     DisasJumpType is_jmp = ctx->base.is_jmp;
7637     target_ulong nip = ctx->base.pc_next;
7638 
7639     if (is_jmp == DISAS_NORETURN) {
7640         /* We have already exited the TB. */
7641         return;
7642     }
7643 
7644     /* Honor single stepping. */
7645     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7646         && (nip <= 0x100 || nip > 0xf00)) {
7647         switch (is_jmp) {
7648         case DISAS_TOO_MANY:
7649         case DISAS_EXIT_UPDATE:
7650         case DISAS_CHAIN_UPDATE:
7651             gen_update_nip(ctx, nip);
7652             break;
7653         case DISAS_EXIT:
7654         case DISAS_CHAIN:
7655             break;
7656         default:
7657             g_assert_not_reached();
7658         }
7659 
7660         gen_debug_exception(ctx);
7661         return;
7662     }
7663 
7664     switch (is_jmp) {
7665     case DISAS_TOO_MANY:
7666         if (use_goto_tb(ctx, nip)) {
7667             pmu_count_insns(ctx);
7668             tcg_gen_goto_tb(0);
7669             gen_update_nip(ctx, nip);
7670             tcg_gen_exit_tb(ctx->base.tb, 0);
7671             break;
7672         }
7673         /* fall through */
7674     case DISAS_CHAIN_UPDATE:
7675         gen_update_nip(ctx, nip);
7676         /* fall through */
7677     case DISAS_CHAIN:
7678         /*
7679          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7680          * CF_NO_GOTO_PTR is set. Count insns now.
7681          */
        if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
7683             pmu_count_insns(ctx);
7684         }
7685 
7686         tcg_gen_lookup_and_goto_ptr();
7687         break;
7688 
7689     case DISAS_EXIT_UPDATE:
7690         gen_update_nip(ctx, nip);
7691         /* fall through */
7692     case DISAS_EXIT:
7693         pmu_count_insns(ctx);
7694         tcg_gen_exit_tb(NULL, 0);
7695         break;
7696 
7697     default:
7698         g_assert_not_reached();
7699     }
7700 }
7701 
7702 static void ppc_tr_disas_log(const DisasContextBase *dcbase,
7703                              CPUState *cs, FILE *logfile)
7704 {
7705     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
7706     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
7707 }
7708 
7709 static const TranslatorOps ppc_tr_ops = {
7710     .init_disas_context = ppc_tr_init_disas_context,
7711     .tb_start           = ppc_tr_tb_start,
7712     .insn_start         = ppc_tr_insn_start,
7713     .translate_insn     = ppc_tr_translate_insn,
7714     .tb_stop            = ppc_tr_tb_stop,
7715     .disas_log          = ppc_tr_disas_log,
7716 };
7717 
7718 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns,
7719                            target_ulong pc, void *host_pc)
7720 {
7721     DisasContext ctx;
7722 
7723     translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
7724 }
7725