/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "qemu/host-utils.h"
#include "qemu/main-loop.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"
#include "spr_common.h"

#include "qemu/qemu-print.h"
#include "qapi/error.h"

#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2

/* Include definitions for instruction classes and implementation flags */
/* #define PPC_DEBUG_DISAS */

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers                                                  */

/* global register indexes */
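/*
 * Backing store for the TCG global register names.  Each name is kept
 * NUL-terminated: "r0".."r9" take 3 bytes and "r10".."r31" take 4 (GPR),
 * the SPE high halves "r0H".."r31H" take one byte more, and the CR
 * fields "crf0".."crf7" take 5 bytes each, matching the size arithmetic
 * below.
 */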
static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5           /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_val),
                                     "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;  /* current instruction address */
    uint32_t opcode;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    bool hr;
    bool mmcr0_pmcc0;
    bool mmcr0_pmcc1;
    bool pmu_insn_cnt;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

#define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
#define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
#define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
#define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
     return ctx->le_mode;
#else
     return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long.  */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction.
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction.
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_icount_io_start(DisasContext *ctx)
{
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
        /*
         * An I/O instruction must be last in the TB.
         * Chain to the next TB, and let the code from gen_tb_start
         * decide if we need to return to the main loop.
         * Doing this first also allows this value to be overridden.
         */
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}

/*
 * Tells the caller which exception to generate and prepares the SPR
 * registers for it.
 *
 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
 * POWERPC_EXCP_DEBUG (on BookE).
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx)
{
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
            dbsr = DBCR0_ICMP;
        } else {
            /* Must have been a branch */
            dbsr = DBCR0_BRT;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return POWERPC_EXCP_TRACE;
    }
}

static void gen_debug_exception(DisasContext *ctx)
{
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/*****************************************************************************/
/* SPR READ/WRITE CALLBACKS */

void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: trying to access SPR %d!\n", sprn);
#endif
}

/* #define PPC_DUMP_SPR_ACCESSES */

/*
 * Generic callbacks:
 * do nothing but store/retrieve spr value
 */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_load_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_store_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}

void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
{
    spr_write_generic(ctx, sprn, gprn);

    /*
     * SPR_CTRL writes must force a new translation block,
     * allowing the PMU to calculate the run latch events with
     * more accuracy.
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}

#endif

/* SPR common to all PowerPC */
/* XER */
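/*
 * XER is not held as a single register: SO, OV, CA (and the ISA 3.00
 * OV32/CA32 bits) live in separate TCG globals, so reading XER below
 * reassembles them around the remaining cpu_xer bits and writing XER
 * scatters them back out.
 */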
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags; the isa300 check is only done when reading back */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* LR */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

/* CFAR */
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}

/* User read access to SPR */
/* USPRx */
/* UMMCRx */
/* UPMCx */
/* USIA */
/* UDECR */
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC */
/* DECR */
#if !defined(CONFIG_USER_ONLY)
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
}

void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC, except 601 */
/* Time base */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}

#endif
#endif

#if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT7U */
/* IBAT0L...IBAT7L */
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* SDR1 */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* 64-bit PowerPC specific SPRs */
/* PIDR */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    tcg_temp_free(t0);
}
void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
}

/* DPDES */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
}

void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
}
#endif
#endif

/* PowerPC 40x specific registers */
#if !defined(CONFIG_USER_ONLY)
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
}

void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
    gen_helper_store_40x_pid(cpu_env, t0);
    tcg_temp_free(t0);
}

void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* PIR */
#if !defined(CONFIG_USER_ONLY)
void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
    tcg_temp_free(t0);
}
#endif

/* SPE specific registers */
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
    tcg_temp_free_i32(t0);
}

void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_temp_free_i32(t0);
}

#if !defined(CONFIG_USER_ONLY)
/* Callback used to write the exception vector base */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
{
    int sprn_offs;

    if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
        sprn_offs = sprn - SPR_BOOKE_IVOR0;
    } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
        sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
    } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
        sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
    } else {
        printf("Trying to write an unknown exception vector %d %03x\n",
               sprn, sprn);
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }

    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
#endif

#ifdef TARGET_PPC64
#ifndef CONFIG_USER_ONLY
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
#endif
#endif

#ifndef CONFIG_USER_ONLY
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
#endif /* !CONFIG_USER_ONLY */

#if !defined(CONFIG_USER_ONLY)
void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn],
                    ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}
void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}

#endif

#if !defined(CONFIG_USER_ONLY)
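/*
 * 64-bit MAS7||MAS3 view: a write is split across SPR_BOOKE_MAS3 (low
 * 32 bits) and SPR_BOOKE_MAS7 (high 32 bits), and a read recombines the
 * two SPRs into one value.
 */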
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}

void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
{
    TCGv mas7 = tcg_temp_new();
    TCGv mas3 = tcg_temp_new();
    gen_load_spr(mas7, SPR_BOOKE_MAS7);
    tcg_gen_shli_tl(mas7, mas7, 32);
    gen_load_spr(mas3, SPR_BOOKE_MAS3);
    tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
    tcg_temp_free(mas3);
    tcg_temp_free(mas7);
}

#endif

#ifdef TARGET_PPC64
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_msr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

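/*
 * Access the upper 32 bits of the SPR registered immediately before this
 * one (sprn - 1): reads shift the 64-bit value down, writes deposit the
 * new data into bits 63:32 while preserving the lower half.
 */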
void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
{
    TCGv spr_up = tcg_temp_new();
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_shri_tl(spr_up, spr, 32);
    tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);

    tcg_temp_free(spr);
    tcg_temp_free(spr_up);
}

void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
    gen_store_spr(sprn - 1, spr);

    tcg_temp_free(spr);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
    tcg_temp_free(hmer);
}

void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
#endif /* !defined(CONFIG_USER_ONLY) */

void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
#endif

#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV                                                          \
    do {                                                                \
        if (unlikely(ctx->pr || !ctx->hv)) {                            \
            GEN_PRIV;                                                   \
        }                                                               \
    } while (0)
#define CHK_SV                   \
    do {                         \
        if (unlikely(ctx->pr)) { \
            GEN_PRIV;            \
        }                        \
    } while (0)
#define CHK_HVRM                                            \
    do {                                                    \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {     \
            GEN_PRIV;                                       \
        }                                                   \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table                                                */

#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};

/***                           Integer comparison                          ***/

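/*
 * Compare arg0 with arg1 (signed when s is non-zero) and set CR field
 * crf to LT, GT or EQ accordingly, with the SO bit copied in from XER.
 */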
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
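/*
 * The low byte of rA is tested against one 8-bit range taken from rB (or
 * two ranges when the opcode's mode bit is set); the GT bit of CR[BF] is
 * set when the byte falls inside a range.
 */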
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
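/* rD = (CR bit selected by BC set) ? (rA ? GPR[rA] : 0) : GPR[rB] */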
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/***                           Integer arithmetic                          ***/

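/*
 * Signed overflow detection for result arg0 of an operation on arg1 and
 * arg2: the xor/and sequence below leaves the overflow indication in the
 * sign bit(s), from which OV is extracted (bit 31 in narrow mode, the top
 * bit otherwise).  OV32 is also set on ISA 3.00, and OV is accumulated
 * into SO.
 */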
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

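/*
 * ISA 3.00 CA32: the carry out of the low 32 bits is recovered as bit 32
 * of (arg0 ^ arg1) ^ res for addition (the operand xor is complemented
 * for subtraction).  Pre-3.00 CPUs leave CA32 untouched.
 */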
static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}

/* Common add function */
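/*
 * ret = arg1 + arg2 (+ ca when add_ca is set).  compute_ca/compute_ov
 * select whether the carry (ca/ca32) and overflow flags are updated, and
 * compute_rc0 updates CR0 from the result.  A temporary is used for the
 * sum so that ret may alias either source operand.
 */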
1581 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1582                                     TCGv arg2, TCGv ca, TCGv ca32,
1583                                     bool add_ca, bool compute_ca,
1584                                     bool compute_ov, bool compute_rc0)
1585 {
1586     TCGv t0 = ret;
1587 
1588     if (compute_ca || compute_ov) {
1589         t0 = tcg_temp_new();
1590     }
1591 
1592     if (compute_ca) {
1593         if (NARROW_MODE(ctx)) {
1594             /*
1595              * Caution: a non-obvious corner case of the spec is that
1596              * we must produce the *entire* 64-bit addition, but
1597              * produce the carry into bit 32.
1598              */
1599             TCGv t1 = tcg_temp_new();
1600             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1601             tcg_gen_add_tl(t0, arg1, arg2);
1602             if (add_ca) {
1603                 tcg_gen_add_tl(t0, t0, ca);
1604             }
1605             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1606             tcg_temp_free(t1);
1607             tcg_gen_extract_tl(ca, ca, 32, 1);
1608             if (is_isa300(ctx)) {
1609                 tcg_gen_mov_tl(ca32, ca);
1610             }
1611         } else {
1612             TCGv zero = tcg_const_tl(0);
1613             if (add_ca) {
1614                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1615                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1616             } else {
1617                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1618             }
1619             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1620             tcg_temp_free(zero);
1621         }
1622     } else {
1623         tcg_gen_add_tl(t0, arg1, arg2);
1624         if (add_ca) {
1625             tcg_gen_add_tl(t0, t0, ca);
1626         }
1627     }
1628 
1629     if (compute_ov) {
1630         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1631     }
1632     if (unlikely(compute_rc0)) {
1633         gen_set_Rc0(ctx, t0);
1634     }
1635 
1636     if (t0 != ret) {
1637         tcg_gen_mov_tl(ret, t0);
1638         tcg_temp_free(t0);
1639     }
1640 }
1641 /* Add functions with two operands */
1642 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
1643 static void glue(gen_, name)(DisasContext *ctx)                               \
1644 {                                                                             \
1645     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1646                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1647                      ca, glue(ca, 32),                                        \
1648                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1649 }
1650 /* Add functions with one operand and one immediate */
1651 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
1652                                 add_ca, compute_ca, compute_ov)               \
1653 static void glue(gen_, name)(DisasContext *ctx)                               \
1654 {                                                                             \
1655     TCGv t0 = tcg_const_tl(const_val);                                        \
1656     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1657                      cpu_gpr[rA(ctx->opcode)], t0,                            \
1658                      ca, glue(ca, 32),                                        \
1659                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1660     tcg_temp_free(t0);                                                        \
1661 }
1662 
1663 /* add  add.  addo  addo. */
1664 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1665 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1666 /* addc  addc.  addco  addco. */
1667 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1668 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1669 /* adde  adde.  addeo  addeo. */
1670 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1671 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1672 /* addme  addme.  addmeo  addmeo.  */
1673 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1674 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1675 /* addex (uses OV and OV32 as the carry-in and carry-out) */
1676 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1677 /* addze  addze.  addzeo  addzeo. */
1678 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1679 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1680 /* addic  addic.*/
1681 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1682 {
1683     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1684     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1685                      c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1686     tcg_temp_free(c);
1687 }
1688 
1689 static void gen_addic(DisasContext *ctx)
1690 {
1691     gen_op_addic(ctx, 0);
1692 }
1693 
1694 static void gen_addic_(DisasContext *ctx)
1695 {
1696     gen_op_addic(ctx, 1);
1697 }
1698 
1699 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1700                                      TCGv arg2, int sign, int compute_ov)
1701 {
1702     TCGv_i32 t0 = tcg_temp_new_i32();
1703     TCGv_i32 t1 = tcg_temp_new_i32();
1704     TCGv_i32 t2 = tcg_temp_new_i32();
1705     TCGv_i32 t3 = tcg_temp_new_i32();
1706 
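         /*
          * t2 is set to 1 for the cases the architecture leaves undefined
          * (division by zero, and INT_MIN / -1 when signed); the divisor is
          * then forced to a non-zero value so the host division cannot
          * trap, and t2 is what feeds OV when compute_ov is set.
          */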
1707     tcg_gen_trunc_tl_i32(t0, arg1);
1708     tcg_gen_trunc_tl_i32(t1, arg2);
1709     if (sign) {
1710         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1711         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1712         tcg_gen_and_i32(t2, t2, t3);
1713         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1714         tcg_gen_or_i32(t2, t2, t3);
1715         tcg_gen_movi_i32(t3, 0);
1716         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1717         tcg_gen_div_i32(t3, t0, t1);
1718         tcg_gen_extu_i32_tl(ret, t3);
1719     } else {
1720         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1721         tcg_gen_movi_i32(t3, 0);
1722         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1723         tcg_gen_divu_i32(t3, t0, t1);
1724         tcg_gen_extu_i32_tl(ret, t3);
1725     }
1726     if (compute_ov) {
1727         tcg_gen_extu_i32_tl(cpu_ov, t2);
1728         if (is_isa300(ctx)) {
1729             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1730         }
1731         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1732     }
1733     tcg_temp_free_i32(t0);
1734     tcg_temp_free_i32(t1);
1735     tcg_temp_free_i32(t2);
1736     tcg_temp_free_i32(t3);
1737 
1738     if (unlikely(Rc(ctx->opcode) != 0)) {
1739         gen_set_Rc0(ctx, ret);
1740     }
1741 }
1742 /* Div functions */
1743 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
1744 static void glue(gen_, name)(DisasContext *ctx)                               \
1745 {                                                                             \
1746     gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1747                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1748                      sign, compute_ov);                                       \
1749 }
1750 /* divwu  divwu.  divwuo  divwuo.   */
1751 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1752 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1753 /* divw  divw.  divwo  divwo.   */
1754 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1755 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1756 
1757 /* div[wd]eu[o][.] */
1758 #define GEN_DIVE(name, hlpr, compute_ov)                                      \
1759 static void gen_##name(DisasContext *ctx)                                     \
1760 {                                                                             \
1761     TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
1762     gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
1763                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1764     tcg_temp_free_i32(t0);                                                    \
1765     if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
1766         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
1767     }                                                                         \
1768 }
1769 
1770 GEN_DIVE(divweu, divweu, 0);
1771 GEN_DIVE(divweuo, divweu, 1);
1772 GEN_DIVE(divwe, divwe, 0);
1773 GEN_DIVE(divweo, divwe, 1);
1774 
1775 #if defined(TARGET_PPC64)
1776 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1777                                      TCGv arg2, int sign, int compute_ov)
1778 {
1779     TCGv_i64 t0 = tcg_temp_new_i64();
1780     TCGv_i64 t1 = tcg_temp_new_i64();
1781     TCGv_i64 t2 = tcg_temp_new_i64();
1782     TCGv_i64 t3 = tcg_temp_new_i64();
1783 
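         /* Same undefined-case handling as gen_op_arith_divw, in 64 bits. */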
1784     tcg_gen_mov_i64(t0, arg1);
1785     tcg_gen_mov_i64(t1, arg2);
1786     if (sign) {
1787         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1788         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1789         tcg_gen_and_i64(t2, t2, t3);
1790         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1791         tcg_gen_or_i64(t2, t2, t3);
1792         tcg_gen_movi_i64(t3, 0);
1793         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1794         tcg_gen_div_i64(ret, t0, t1);
1795     } else {
1796         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1797         tcg_gen_movi_i64(t3, 0);
1798         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1799         tcg_gen_divu_i64(ret, t0, t1);
1800     }
1801     if (compute_ov) {
1802         tcg_gen_mov_tl(cpu_ov, t2);
1803         if (is_isa300(ctx)) {
1804             tcg_gen_mov_tl(cpu_ov32, t2);
1805         }
1806         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1807     }
1808     tcg_temp_free_i64(t0);
1809     tcg_temp_free_i64(t1);
1810     tcg_temp_free_i64(t2);
1811     tcg_temp_free_i64(t3);
1812 
1813     if (unlikely(Rc(ctx->opcode) != 0)) {
1814         gen_set_Rc0(ctx, ret);
1815     }
1816 }
1817 
1818 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
1819 static void glue(gen_, name)(DisasContext *ctx)                               \
1820 {                                                                             \
1821     gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1822                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
1823                       sign, compute_ov);                                      \
1824 }
1825 /* divdu  divdu.  divduo  divduo.   */
1826 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1827 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1828 /* divd  divd.  divdo  divdo.   */
1829 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1830 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1831 
1832 GEN_DIVE(divdeu, divdeu, 0);
1833 GEN_DIVE(divdeuo, divdeu, 1);
1834 GEN_DIVE(divde, divde, 0);
1835 GEN_DIVE(divdeo, divde, 1);
1836 #endif
1837 
1838 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1839                                      TCGv arg2, int sign)
1840 {
1841     TCGv_i32 t0 = tcg_temp_new_i32();
1842     TCGv_i32 t1 = tcg_temp_new_i32();
1843 
1844     tcg_gen_trunc_tl_i32(t0, arg1);
1845     tcg_gen_trunc_tl_i32(t1, arg2);
1846     if (sign) {
1847         TCGv_i32 t2 = tcg_temp_new_i32();
1848         TCGv_i32 t3 = tcg_temp_new_i32();
1849         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1850         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1851         tcg_gen_and_i32(t2, t2, t3);
1852         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1853         tcg_gen_or_i32(t2, t2, t3);
1854         tcg_gen_movi_i32(t3, 0);
1855         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1856         tcg_gen_rem_i32(t3, t0, t1);
1857         tcg_gen_ext_i32_tl(ret, t3);
1858         tcg_temp_free_i32(t2);
1859         tcg_temp_free_i32(t3);
1860     } else {
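             /* A zero divisor is replaced by 1 so remu cannot trap; the
                result in that (undefined) case is then simply 0. */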
1861         TCGv_i32 t2 = tcg_const_i32(1);
1862         TCGv_i32 t3 = tcg_const_i32(0);
1863         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1864         tcg_gen_remu_i32(t3, t0, t1);
1865         tcg_gen_extu_i32_tl(ret, t3);
1866         tcg_temp_free_i32(t2);
1867         tcg_temp_free_i32(t3);
1868     }
1869     tcg_temp_free_i32(t0);
1870     tcg_temp_free_i32(t1);
1871 }
1872 
1873 #define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
1874 static void glue(gen_, name)(DisasContext *ctx)                             \
1875 {                                                                           \
1876     gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1877                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1878                       sign);                                                \
1879 }
1880 
1881 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1882 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1883 
1884 #if defined(TARGET_PPC64)
1885 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1886                                      TCGv arg2, int sign)
1887 {
1888     TCGv_i64 t0 = tcg_temp_new_i64();
1889     TCGv_i64 t1 = tcg_temp_new_i64();
1890 
1891     tcg_gen_mov_i64(t0, arg1);
1892     tcg_gen_mov_i64(t1, arg2);
1893     if (sign) {
1894         TCGv_i64 t2 = tcg_temp_new_i64();
1895         TCGv_i64 t3 = tcg_temp_new_i64();
1896         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1897         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1898         tcg_gen_and_i64(t2, t2, t3);
1899         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1900         tcg_gen_or_i64(t2, t2, t3);
1901         tcg_gen_movi_i64(t3, 0);
1902         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1903         tcg_gen_rem_i64(ret, t0, t1);
1904         tcg_temp_free_i64(t2);
1905         tcg_temp_free_i64(t3);
1906     } else {
1907         TCGv_i64 t2 = tcg_const_i64(1);
1908         TCGv_i64 t3 = tcg_const_i64(0);
1909         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1910         tcg_gen_remu_i64(ret, t0, t1);
1911         tcg_temp_free_i64(t2);
1912         tcg_temp_free_i64(t3);
1913     }
1914     tcg_temp_free_i64(t0);
1915     tcg_temp_free_i64(t1);
1916 }
1917 
1918 #define GEN_INT_ARITH_MODD(name, opc3, sign)                                \
1919 static void glue(gen_, name)(DisasContext *ctx)                             \
1920 {                                                                           \
1921     gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1922                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1923                       sign);                                                \
1924 }
1925 
1926 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1927 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1928 #endif
1929 
1930 /* mulhw  mulhw. */
1931 static void gen_mulhw(DisasContext *ctx)
1932 {
1933     TCGv_i32 t0 = tcg_temp_new_i32();
1934     TCGv_i32 t1 = tcg_temp_new_i32();
1935 
1936     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1937     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1938     tcg_gen_muls2_i32(t0, t1, t0, t1);
1939     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1940     tcg_temp_free_i32(t0);
1941     tcg_temp_free_i32(t1);
1942     if (unlikely(Rc(ctx->opcode) != 0)) {
1943         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1944     }
1945 }
1946 
1947 /* mulhwu  mulhwu.  */
1948 static void gen_mulhwu(DisasContext *ctx)
1949 {
1950     TCGv_i32 t0 = tcg_temp_new_i32();
1951     TCGv_i32 t1 = tcg_temp_new_i32();
1952 
1953     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1954     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1955     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1956     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1957     tcg_temp_free_i32(t0);
1958     tcg_temp_free_i32(t1);
1959     if (unlikely(Rc(ctx->opcode) != 0)) {
1960         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1961     }
1962 }
1963 
1964 /* mullw  mullw. */
1965 static void gen_mullw(DisasContext *ctx)
1966 {
1967 #if defined(TARGET_PPC64)
1968     TCGv_i64 t0, t1;
1969     t0 = tcg_temp_new_i64();
1970     t1 = tcg_temp_new_i64();
1971     tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1972     tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1973     tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1974     tcg_temp_free(t0);
1975     tcg_temp_free(t1);
1976 #else
1977     tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1978                     cpu_gpr[rB(ctx->opcode)]);
1979 #endif
1980     if (unlikely(Rc(ctx->opcode) != 0)) {
1981         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1982     }
1983 }
1984 
1985 /* mullwo  mullwo. */
1986 static void gen_mullwo(DisasContext *ctx)
1987 {
1988     TCGv_i32 t0 = tcg_temp_new_i32();
1989     TCGv_i32 t1 = tcg_temp_new_i32();
1990 
1991     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1992     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1993     tcg_gen_muls2_i32(t0, t1, t0, t1);
1994 #if defined(TARGET_PPC64)
1995     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1996 #else
1997     tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
1998 #endif
1999 
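         /*
          * OV is set iff the high half of the product is not the sign
          * extension of the low half.
          */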
2000     tcg_gen_sari_i32(t0, t0, 31);
2001     tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2002     tcg_gen_extu_i32_tl(cpu_ov, t0);
2003     if (is_isa300(ctx)) {
2004         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2005     }
2006     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2007 
2008     tcg_temp_free_i32(t0);
2009     tcg_temp_free_i32(t1);
2010     if (unlikely(Rc(ctx->opcode) != 0)) {
2011         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2012     }
2013 }
2014 
2015 /* mulli */
2016 static void gen_mulli(DisasContext *ctx)
2017 {
2018     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2019                     SIMM(ctx->opcode));
2020 }
2021 
2022 #if defined(TARGET_PPC64)
2023 /* mulhd  mulhd. */
2024 static void gen_mulhd(DisasContext *ctx)
2025 {
2026     TCGv lo = tcg_temp_new();
2027     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2028                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2029     tcg_temp_free(lo);
2030     if (unlikely(Rc(ctx->opcode) != 0)) {
2031         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2032     }
2033 }
2034 
2035 /* mulhdu  mulhdu. */
2036 static void gen_mulhdu(DisasContext *ctx)
2037 {
2038     TCGv lo = tcg_temp_new();
2039     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2040                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2041     tcg_temp_free(lo);
2042     if (unlikely(Rc(ctx->opcode) != 0)) {
2043         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2044     }
2045 }
2046 
2047 /* mulld  mulld. */
2048 static void gen_mulld(DisasContext *ctx)
2049 {
2050     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2051                    cpu_gpr[rB(ctx->opcode)]);
2052     if (unlikely(Rc(ctx->opcode) != 0)) {
2053         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2054     }
2055 }
2056 
2057 /* mulldo  mulldo. */
2058 static void gen_mulldo(DisasContext *ctx)
2059 {
2060     TCGv_i64 t0 = tcg_temp_new_i64();
2061     TCGv_i64 t1 = tcg_temp_new_i64();
2062 
2063     tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2064                       cpu_gpr[rB(ctx->opcode)]);
2065     tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2066 
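         /* As for mullwo: overflow iff t1 is not the sign extension of t0. */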
2067     tcg_gen_sari_i64(t0, t0, 63);
2068     tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2069     if (is_isa300(ctx)) {
2070         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2071     }
2072     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2073 
2074     tcg_temp_free_i64(t0);
2075     tcg_temp_free_i64(t1);
2076 
2077     if (unlikely(Rc(ctx->opcode) != 0)) {
2078         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2079     }
2080 }
2081 #endif
2082 
2083 /* Common subf function */
2084 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2085                                      TCGv arg2, bool add_ca, bool compute_ca,
2086                                      bool compute_ov, bool compute_rc0)
2087 {
2088     TCGv t0 = ret;
2089 
2090     if (compute_ca || compute_ov) {
2091         t0 = tcg_temp_new();
2092     }
2093 
2094     if (compute_ca) {
2095         /* dest = ~arg1 + arg2 [+ ca].  */
2096         if (NARROW_MODE(ctx)) {
2097             /*
2098              * Caution: a non-obvious corner case of the spec is that
2099              * we must perform the *entire* 64-bit addition, yet
2100              * report the carry into bit 32 rather than bit 64.
2101              */
2102             TCGv inv1 = tcg_temp_new();
2103             TCGv t1 = tcg_temp_new();
2104             tcg_gen_not_tl(inv1, arg1);
2105             if (add_ca) {
2106                 tcg_gen_add_tl(t0, arg2, cpu_ca);
2107             } else {
2108                 tcg_gen_addi_tl(t0, arg2, 1);
2109             }
2110             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
2111             tcg_gen_add_tl(t0, t0, inv1);
2112             tcg_temp_free(inv1);
2113             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
2114             tcg_temp_free(t1);
2115             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2116             if (is_isa300(ctx)) {
2117                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2118             }
2119         } else if (add_ca) {
2120             TCGv zero, inv1 = tcg_temp_new();
2121             tcg_gen_not_tl(inv1, arg1);
2122             zero = tcg_const_tl(0);
2123             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2124             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2125             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2126             tcg_temp_free(zero);
2127             tcg_temp_free(inv1);
2128         } else {
2129             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2130             tcg_gen_sub_tl(t0, arg2, arg1);
2131             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2132         }
2133     } else if (add_ca) {
2134         /*
2135          * Since we're ignoring carry-out, we can simplify the
2136          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2137          */
2138         tcg_gen_sub_tl(t0, arg2, arg1);
2139         tcg_gen_add_tl(t0, t0, cpu_ca);
2140         tcg_gen_subi_tl(t0, t0, 1);
2141     } else {
2142         tcg_gen_sub_tl(t0, arg2, arg1);
2143     }
2144 
2145     if (compute_ov) {
2146         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2147     }
2148     if (unlikely(compute_rc0)) {
2149         gen_set_Rc0(ctx, t0);
2150     }
2151 
2152     if (t0 != ret) {
2153         tcg_gen_mov_tl(ret, t0);
2154         tcg_temp_free(t0);
2155     }
2156 }
2157 /* Sub functions with two operands */
2158 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
2159 static void glue(gen_, name)(DisasContext *ctx)                               \
2160 {                                                                             \
2161     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2162                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
2163                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2164 }
2165 /* Sub functions with one operand and one immediate */
2166 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
2167                                 add_ca, compute_ca, compute_ov)               \
2168 static void glue(gen_, name)(DisasContext *ctx)                               \
2169 {                                                                             \
2170     TCGv t0 = tcg_const_tl(const_val);                                        \
2171     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2172                       cpu_gpr[rA(ctx->opcode)], t0,                           \
2173                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2174     tcg_temp_free(t0);                                                        \
2175 }
2176 /* subf  subf.  subfo  subfo. */
2177 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2178 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2179 /* subfc  subfc.  subfco  subfco. */
2180 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2181 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2182 /* subfe  subfe.  subfeo  subfeo. */
2183 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2184 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2185 /* subfme  subfme.  subfmeo  subfmeo.  */
2186 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2187 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2188 /* subfze  subfze.  subfzeo  subfzeo.*/
2189 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2190 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2191 
2192 /* subfic */
2193 static void gen_subfic(DisasContext *ctx)
2194 {
2195     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2196     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2197                       c, 0, 1, 0, 0);
2198     tcg_temp_free(c);
2199 }
2200 
2201 /* neg neg. nego nego. */
2202 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2203 {
2204     TCGv zero = tcg_const_tl(0);
2205     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2206                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2207     tcg_temp_free(zero);
2208 }
2209 
2210 static void gen_neg(DisasContext *ctx)
2211 {
2212     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2213     if (unlikely(Rc(ctx->opcode))) {
2214         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2215     }
2216 }
2217 
2218 static void gen_nego(DisasContext *ctx)
2219 {
2220     gen_op_arith_neg(ctx, 1);
2221 }
2222 
2223 /***                            Integer logical                            ***/
2224 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2225 static void glue(gen_, name)(DisasContext *ctx)                               \
2226 {                                                                             \
2227     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2228        cpu_gpr[rB(ctx->opcode)]);                                             \
2229     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2230         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2231 }
2232 
2233 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2234 static void glue(gen_, name)(DisasContext *ctx)                               \
2235 {                                                                             \
2236     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2237     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2238         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2239 }
2240 
2241 /* and & and. */
2242 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2243 /* andc & andc. */
2244 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2245 
2246 /* andi. */
2247 static void gen_andi_(DisasContext *ctx)
2248 {
2249     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2250                     UIMM(ctx->opcode));
2251     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2252 }
2253 
2254 /* andis. */
2255 static void gen_andis_(DisasContext *ctx)
2256 {
2257     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2258                     UIMM(ctx->opcode) << 16);
2259     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2260 }
2261 
2262 /* cntlzw */
2263 static void gen_cntlzw(DisasContext *ctx)
2264 {
2265     TCGv_i32 t = tcg_temp_new_i32();
2266 
2267     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2268     tcg_gen_clzi_i32(t, t, 32);
2269     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2270     tcg_temp_free_i32(t);
2271 
2272     if (unlikely(Rc(ctx->opcode) != 0)) {
2273         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2274     }
2275 }
2276 
2277 /* cnttzw */
2278 static void gen_cnttzw(DisasContext *ctx)
2279 {
2280     TCGv_i32 t = tcg_temp_new_i32();
2281 
2282     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2283     tcg_gen_ctzi_i32(t, t, 32);
2284     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2285     tcg_temp_free_i32(t);
2286 
2287     if (unlikely(Rc(ctx->opcode) != 0)) {
2288         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2289     }
2290 }
2291 
2292 /* eqv & eqv. */
2293 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2294 /* extsb & extsb. */
2295 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2296 /* extsh & extsh. */
2297 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2298 /* nand & nand. */
2299 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2300 /* nor & nor. */
2301 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2302 
2303 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2304 static void gen_pause(DisasContext *ctx)
2305 {
2306     TCGv_i32 t0 = tcg_const_i32(0);
2307     tcg_gen_st_i32(t0, cpu_env,
2308                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2309     tcg_temp_free_i32(t0);
2310 
2311     /* Stop translation, this gives other CPUs a chance to run */
2312     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2313 }
2314 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2315 
2316 /* or & or. */
2317 static void gen_or(DisasContext *ctx)
2318 {
2319     int rs, ra, rb;
2320 
2321     rs = rS(ctx->opcode);
2322     ra = rA(ctx->opcode);
2323     rb = rB(ctx->opcode);
2324     /* Optimisation for the mr case */
2325     if (rs != ra || rs != rb) {
2326         if (rs != rb) {
2327             tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2328         } else {
2329             tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2330         }
2331         if (unlikely(Rc(ctx->opcode) != 0)) {
2332             gen_set_Rc0(ctx, cpu_gpr[ra]);
2333         }
2334     } else if (unlikely(Rc(ctx->opcode) != 0)) {
2335         gen_set_Rc0(ctx, cpu_gpr[rs]);
2336 #if defined(TARGET_PPC64)
2337     } else if (rs != 0) { /* 0 is nop */
2338         int prio = 0;
2339 
2340         switch (rs) {
2341         case 1:
2342             /* Set process priority to low */
2343             prio = 2;
2344             break;
2345         case 6:
2346             /* Set process priority to medium-low */
2347             prio = 3;
2348             break;
2349         case 2:
2350             /* Set process priority to normal */
2351             prio = 4;
2352             break;
2353 #if !defined(CONFIG_USER_ONLY)
2354         case 31:
2355             if (!ctx->pr) {
2356                 /* Set process priority to very low */
2357                 prio = 1;
2358             }
2359             break;
2360         case 5:
2361             if (!ctx->pr) {
2362                 /* Set process priority to medium-high */
2363                 prio = 5;
2364             }
2365             break;
2366         case 3:
2367             if (!ctx->pr) {
2368                 /* Set process priority to high */
2369                 prio = 6;
2370             }
2371             break;
2372         case 7:
2373             if (ctx->hv && !ctx->pr) {
2374                 /* Set process priority to very high */
2375                 prio = 7;
2376             }
2377             break;
2378 #endif
2379         default:
2380             break;
2381         }
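             /*
              * The 3-bit priority field lives in PPR bits 52:50; the andi
              * below clears it and the ori inserts the new value.
              */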
2382         if (prio) {
2383             TCGv t0 = tcg_temp_new();
2384             gen_load_spr(t0, SPR_PPR);
2385             tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2386             tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2387             gen_store_spr(SPR_PPR, t0);
2388             tcg_temp_free(t0);
2389         }
2390 #if !defined(CONFIG_USER_ONLY)
2391         /*
2392          * Pause out of TCG; otherwise spin loops with smt_low eat too
2393          * much CPU and the kernel hangs.  This applies to all
2394          * encodings other than no-op, e.g., miso(rs=26), yield(27),
2395          * mdoio(29), mdoom(30), and all currently undefined.
2396          */
2397         gen_pause(ctx);
2398 #endif
2399 #endif
2400     }
2401 }
2402 /* orc & orc. */
2403 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2404 
2405 /* xor & xor. */
2406 static void gen_xor(DisasContext *ctx)
2407 {
2408     /* Optimisation for "set to zero" case */
2409     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2410         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2411                        cpu_gpr[rB(ctx->opcode)]);
2412     } else {
2413         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2414     }
2415     if (unlikely(Rc(ctx->opcode) != 0)) {
2416         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2417     }
2418 }
2419 
2420 /* ori */
2421 static void gen_ori(DisasContext *ctx)
2422 {
2423     target_ulong uimm = UIMM(ctx->opcode);
2424 
2425     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2426         return;
2427     }
2428     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2429 }
2430 
2431 /* oris */
2432 static void gen_oris(DisasContext *ctx)
2433 {
2434     target_ulong uimm = UIMM(ctx->opcode);
2435 
2436     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2437         /* NOP */
2438         return;
2439     }
2440     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2441                    uimm << 16);
2442 }
2443 
2444 /* xori */
2445 static void gen_xori(DisasContext *ctx)
2446 {
2447     target_ulong uimm = UIMM(ctx->opcode);
2448 
2449     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2450         /* NOP */
2451         return;
2452     }
2453     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2454 }
2455 
2456 /* xoris */
2457 static void gen_xoris(DisasContext *ctx)
2458 {
2459     target_ulong uimm = UIMM(ctx->opcode);
2460 
2461     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2462         /* NOP */
2463         return;
2464     }
2465     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2466                     uimm << 16);
2467 }
2468 
2469 /* popcntb : PowerPC 2.03 specification */
2470 static void gen_popcntb(DisasContext *ctx)
2471 {
2472     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2473 }
2474 
2475 static void gen_popcntw(DisasContext *ctx)
2476 {
2477 #if defined(TARGET_PPC64)
2478     gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2479 #else
2480     tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2481 #endif
2482 }
2483 
2484 #if defined(TARGET_PPC64)
2485 /* popcntd: PowerPC 2.06 specification */
2486 static void gen_popcntd(DisasContext *ctx)
2487 {
2488     tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2489 }
2490 #endif
2491 
2492 /* prtyw: PowerPC 2.05 specification */
2493 static void gen_prtyw(DisasContext *ctx)
2494 {
2495     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2496     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2497     TCGv t0 = tcg_temp_new();
2498     tcg_gen_shri_tl(t0, rs, 16);
2499     tcg_gen_xor_tl(ra, rs, t0);
2500     tcg_gen_shri_tl(t0, ra, 8);
2501     tcg_gen_xor_tl(ra, ra, t0);
2502     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2503     tcg_temp_free(t0);
2504 }
2505 
2506 #if defined(TARGET_PPC64)
2507 /* prtyd: PowerPC 2.05 specification */
2508 static void gen_prtyd(DisasContext *ctx)
2509 {
2510     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2511     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2512     TCGv t0 = tcg_temp_new();
2513     tcg_gen_shri_tl(t0, rs, 32);
2514     tcg_gen_xor_tl(ra, rs, t0);
2515     tcg_gen_shri_tl(t0, ra, 16);
2516     tcg_gen_xor_tl(ra, ra, t0);
2517     tcg_gen_shri_tl(t0, ra, 8);
2518     tcg_gen_xor_tl(ra, ra, t0);
2519     tcg_gen_andi_tl(ra, ra, 1);
2520     tcg_temp_free(t0);
2521 }
2522 #endif
2523 
2524 #if defined(TARGET_PPC64)
2525 /* bpermd */
2526 static void gen_bpermd(DisasContext *ctx)
2527 {
2528     gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2529                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2530 }
2531 #endif
2532 
2533 #if defined(TARGET_PPC64)
2534 /* extsw & extsw. */
2535 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2536 
2537 /* cntlzd */
2538 static void gen_cntlzd(DisasContext *ctx)
2539 {
2540     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2541     if (unlikely(Rc(ctx->opcode) != 0)) {
2542         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2543     }
2544 }
2545 
2546 /* cnttzd */
2547 static void gen_cnttzd(DisasContext *ctx)
2548 {
2549     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2550     if (unlikely(Rc(ctx->opcode) != 0)) {
2551         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2552     }
2553 }
2554 
2555 /* darn */
2556 static void gen_darn(DisasContext *ctx)
2557 {
2558     int l = L(ctx->opcode);
2559 
2560     if (l > 2) {
2561         tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2562     } else {
2563         gen_icount_io_start(ctx);
2564         if (l == 0) {
2565             gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2566         } else {
2567             /* Return 64-bit random for both CRN and RRN */
2568             gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2569         }
2570     }
2571 }
2572 #endif
2573 
2574 /***                             Integer rotate                            ***/
2575 
2576 /* rlwimi & rlwimi. */
2577 static void gen_rlwimi(DisasContext *ctx)
2578 {
2579     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2580     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2581     uint32_t sh = SH(ctx->opcode);
2582     uint32_t mb = MB(ctx->opcode);
2583     uint32_t me = ME(ctx->opcode);
2584 
2585     if (sh == (31 - me) && mb <= me) {
2586         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2587     } else {
2588         target_ulong mask;
2589         bool mask_in_32b = true;
2590         TCGv t1;
2591 
2592 #if defined(TARGET_PPC64)
2593         mb += 32;
2594         me += 32;
2595 #endif
2596         mask = MASK(mb, me);
2597 
2598 #if defined(TARGET_PPC64)
2599         if (mask > 0xffffffffu) {
2600             mask_in_32b = false;
2601         }
2602 #endif
2603         t1 = tcg_temp_new();
2604         if (mask_in_32b) {
2605             TCGv_i32 t0 = tcg_temp_new_i32();
2606             tcg_gen_trunc_tl_i32(t0, t_rs);
2607             tcg_gen_rotli_i32(t0, t0, sh);
2608             tcg_gen_extu_i32_tl(t1, t0);
2609             tcg_temp_free_i32(t0);
2610         } else {
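                 /*
                  * Duplicate the low word into both halves so that the
                  * 64-bit rotate below is equivalent to a 32-bit rotate
                  * of rS.
                  */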
2611 #if defined(TARGET_PPC64)
2612             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2613             tcg_gen_rotli_i64(t1, t1, sh);
2614 #else
2615             g_assert_not_reached();
2616 #endif
2617         }
2618 
2619         tcg_gen_andi_tl(t1, t1, mask);
2620         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2621         tcg_gen_or_tl(t_ra, t_ra, t1);
2622         tcg_temp_free(t1);
2623     }
2624     if (unlikely(Rc(ctx->opcode) != 0)) {
2625         gen_set_Rc0(ctx, t_ra);
2626     }
2627 }
2628 
2629 /* rlwinm & rlwinm. */
2630 static void gen_rlwinm(DisasContext *ctx)
2631 {
2632     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2633     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2634     int sh = SH(ctx->opcode);
2635     int mb = MB(ctx->opcode);
2636     int me = ME(ctx->opcode);
2637     int len = me - mb + 1;
2638     int rsh = (32 - sh) & 31;
2639 
2640     if (sh != 0 && len > 0 && me == (31 - sh)) {
2641         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2642     } else if (me == 31 && rsh + len <= 32) {
2643         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2644     } else {
2645         target_ulong mask;
2646         bool mask_in_32b = true;
2647 #if defined(TARGET_PPC64)
2648         mb += 32;
2649         me += 32;
2650 #endif
2651         mask = MASK(mb, me);
2652 #if defined(TARGET_PPC64)
2653         if (mask > 0xffffffffu) {
2654             mask_in_32b = false;
2655         }
2656 #endif
2657         if (mask_in_32b) {
2658             if (sh == 0) {
2659                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2660             } else {
2661                 TCGv_i32 t0 = tcg_temp_new_i32();
2662                 tcg_gen_trunc_tl_i32(t0, t_rs);
2663                 tcg_gen_rotli_i32(t0, t0, sh);
2664                 tcg_gen_andi_i32(t0, t0, mask);
2665                 tcg_gen_extu_i32_tl(t_ra, t0);
2666                 tcg_temp_free_i32(t0);
2667             }
2668         } else {
2669 #if defined(TARGET_PPC64)
2670             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2671             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2672             tcg_gen_andi_i64(t_ra, t_ra, mask);
2673 #else
2674             g_assert_not_reached();
2675 #endif
2676         }
2677     }
2678     if (unlikely(Rc(ctx->opcode) != 0)) {
2679         gen_set_Rc0(ctx, t_ra);
2680     }
2681 }
2682 
2683 /* rlwnm & rlwnm. */
2684 static void gen_rlwnm(DisasContext *ctx)
2685 {
2686     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2687     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2688     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2689     uint32_t mb = MB(ctx->opcode);
2690     uint32_t me = ME(ctx->opcode);
2691     target_ulong mask;
2692     bool mask_in_32b = true;
2693 
2694 #if defined(TARGET_PPC64)
2695     mb += 32;
2696     me += 32;
2697 #endif
2698     mask = MASK(mb, me);
2699 
2700 #if defined(TARGET_PPC64)
2701     if (mask > 0xffffffffu) {
2702         mask_in_32b = false;
2703     }
2704 #endif
2705     if (mask_in_32b) {
2706         TCGv_i32 t0 = tcg_temp_new_i32();
2707         TCGv_i32 t1 = tcg_temp_new_i32();
2708         tcg_gen_trunc_tl_i32(t0, t_rb);
2709         tcg_gen_trunc_tl_i32(t1, t_rs);
2710         tcg_gen_andi_i32(t0, t0, 0x1f);
2711         tcg_gen_rotl_i32(t1, t1, t0);
2712         tcg_gen_extu_i32_tl(t_ra, t1);
2713         tcg_temp_free_i32(t0);
2714         tcg_temp_free_i32(t1);
2715     } else {
2716 #if defined(TARGET_PPC64)
2717         TCGv_i64 t0 = tcg_temp_new_i64();
2718         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2719         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2720         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2721         tcg_temp_free_i64(t0);
2722 #else
2723         g_assert_not_reached();
2724 #endif
2725     }
2726 
2727     tcg_gen_andi_tl(t_ra, t_ra, mask);
2728 
2729     if (unlikely(Rc(ctx->opcode) != 0)) {
2730         gen_set_Rc0(ctx, t_ra);
2731     }
2732 }
2733 
2734 #if defined(TARGET_PPC64)
2735 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2736 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2737 {                                                                             \
2738     gen_##name(ctx, 0);                                                       \
2739 }                                                                             \
2740                                                                               \
2741 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2742 {                                                                             \
2743     gen_##name(ctx, 1);                                                       \
2744 }
2745 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2746 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2747 {                                                                             \
2748     gen_##name(ctx, 0, 0);                                                    \
2749 }                                                                             \
2750                                                                               \
2751 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2752 {                                                                             \
2753     gen_##name(ctx, 0, 1);                                                    \
2754 }                                                                             \
2755                                                                               \
2756 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2757 {                                                                             \
2758     gen_##name(ctx, 1, 0);                                                    \
2759 }                                                                             \
2760                                                                               \
2761 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2762 {                                                                             \
2763     gen_##name(ctx, 1, 1);                                                    \
2764 }
2765 
2766 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2767 {
2768     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2769     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2770     int len = me - mb + 1;
2771     int rsh = (64 - sh) & 63;
2772 
2773     if (sh != 0 && len > 0 && me == (63 - sh)) {
2774         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2775     } else if (me == 63 && rsh + len <= 64) {
2776         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2777     } else {
2778         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2779         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2780     }
2781     if (unlikely(Rc(ctx->opcode) != 0)) {
2782         gen_set_Rc0(ctx, t_ra);
2783     }
2784 }
2785 
2786 /* rldicl - rldicl. */
2787 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2788 {
2789     uint32_t sh, mb;
2790 
2791     sh = SH(ctx->opcode) | (shn << 5);
2792     mb = MB(ctx->opcode) | (mbn << 5);
2793     gen_rldinm(ctx, mb, 63, sh);
2794 }
2795 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2796 
2797 /* rldicr - rldicr. */
2798 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2799 {
2800     uint32_t sh, me;
2801 
2802     sh = SH(ctx->opcode) | (shn << 5);
2803     me = MB(ctx->opcode) | (men << 5);
2804     gen_rldinm(ctx, 0, me, sh);
2805 }
2806 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2807 
2808 /* rldic - rldic. */
2809 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2810 {
2811     uint32_t sh, mb;
2812 
2813     sh = SH(ctx->opcode) | (shn << 5);
2814     mb = MB(ctx->opcode) | (mbn << 5);
2815     gen_rldinm(ctx, mb, 63 - sh, sh);
2816 }
2817 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2818 
2819 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2820 {
2821     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2822     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2823     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2824     TCGv t0;
2825 
2826     t0 = tcg_temp_new();
2827     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2828     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2829     tcg_temp_free(t0);
2830 
2831     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2832     if (unlikely(Rc(ctx->opcode) != 0)) {
2833         gen_set_Rc0(ctx, t_ra);
2834     }
2835 }
2836 
2837 /* rldcl - rldcl. */
2838 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2839 {
2840     uint32_t mb;
2841 
2842     mb = MB(ctx->opcode) | (mbn << 5);
2843     gen_rldnm(ctx, mb, 63);
2844 }
2845 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2846 
2847 /* rldcr - rldcr. */
2848 static inline void gen_rldcr(DisasContext *ctx, int men)
2849 {
2850     uint32_t me;
2851 
2852     me = MB(ctx->opcode) | (men << 5);
2853     gen_rldnm(ctx, 0, me);
2854 }
2855 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2856 
2857 /* rldimi - rldimi. */
2858 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2859 {
2860     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2861     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2862     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2863     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2864     uint32_t me = 63 - sh;
2865 
2866     if (mb <= me) {
2867         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2868     } else {
2869         target_ulong mask = MASK(mb, me);
2870         TCGv t1 = tcg_temp_new();
2871 
2872         tcg_gen_rotli_tl(t1, t_rs, sh);
2873         tcg_gen_andi_tl(t1, t1, mask);
2874         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2875         tcg_gen_or_tl(t_ra, t_ra, t1);
2876         tcg_temp_free(t1);
2877     }
2878     if (unlikely(Rc(ctx->opcode) != 0)) {
2879         gen_set_Rc0(ctx, t_ra);
2880     }
2881 }
2882 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2883 #endif
2884 
2885 /***                             Integer shift                             ***/
2886 
2887 /* slw & slw. */
2888 static void gen_slw(DisasContext *ctx)
2889 {
2890     TCGv t0, t1;
2891 
2892     t0 = tcg_temp_new();
2893     /* AND rS with a mask that is 0 when rB >= 0x20 */
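         /*
          * Shifting rB left moves its bit 5 into the sign position, so the
          * arithmetic shift right below gives all-ones exactly when the
          * shift amount is 32 or more.
          */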
2894 #if defined(TARGET_PPC64)
2895     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2896     tcg_gen_sari_tl(t0, t0, 0x3f);
2897 #else
2898     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2899     tcg_gen_sari_tl(t0, t0, 0x1f);
2900 #endif
2901     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2902     t1 = tcg_temp_new();
2903     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2904     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2905     tcg_temp_free(t1);
2906     tcg_temp_free(t0);
2907     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2908     if (unlikely(Rc(ctx->opcode) != 0)) {
2909         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2910     }
2911 }
2912 
2913 /* sraw & sraw. */
2914 static void gen_sraw(DisasContext *ctx)
2915 {
2916     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2917                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2918     if (unlikely(Rc(ctx->opcode) != 0)) {
2919         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2920     }
2921 }
2922 
2923 /* srawi & srawi. */
2924 static void gen_srawi(DisasContext *ctx)
2925 {
2926     int sh = SH(ctx->opcode);
2927     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2928     TCGv src = cpu_gpr[rS(ctx->opcode)];
2929     if (sh == 0) {
2930         tcg_gen_ext32s_tl(dst, src);
2931         tcg_gen_movi_tl(cpu_ca, 0);
2932         if (is_isa300(ctx)) {
2933             tcg_gen_movi_tl(cpu_ca32, 0);
2934         }
2935     } else {
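             /*
              * CA is set iff the (sign-extended) source is negative and at
              * least one 1 bit is shifted out.
              */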
2936         TCGv t0;
2937         tcg_gen_ext32s_tl(dst, src);
2938         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2939         t0 = tcg_temp_new();
2940         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2941         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2942         tcg_temp_free(t0);
2943         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2944         if (is_isa300(ctx)) {
2945             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2946         }
2947         tcg_gen_sari_tl(dst, dst, sh);
2948     }
2949     if (unlikely(Rc(ctx->opcode) != 0)) {
2950         gen_set_Rc0(ctx, dst);
2951     }
2952 }
2953 
2954 /* srw & srw. */
2955 static void gen_srw(DisasContext *ctx)
2956 {
2957     TCGv t0, t1;
2958 
2959     t0 = tcg_temp_new();
2960     /* AND rS with a mask that is 0 when rB >= 0x20 */
2961 #if defined(TARGET_PPC64)
2962     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2963     tcg_gen_sari_tl(t0, t0, 0x3f);
2964 #else
2965     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2966     tcg_gen_sari_tl(t0, t0, 0x1f);
2967 #endif
2968     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2969     tcg_gen_ext32u_tl(t0, t0);
2970     t1 = tcg_temp_new();
2971     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2972     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2973     tcg_temp_free(t1);
2974     tcg_temp_free(t0);
2975     if (unlikely(Rc(ctx->opcode) != 0)) {
2976         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2977     }
2978 }
2979 
2980 #if defined(TARGET_PPC64)
2981 /* sld & sld. */
2982 static void gen_sld(DisasContext *ctx)
2983 {
2984     TCGv t0, t1;
2985 
2986     t0 = tcg_temp_new();
2987     /* AND rS with a mask that is 0 when rB >= 0x40 */
2988     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2989     tcg_gen_sari_tl(t0, t0, 0x3f);
2990     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2991     t1 = tcg_temp_new();
2992     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2993     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2994     tcg_temp_free(t1);
2995     tcg_temp_free(t0);
2996     if (unlikely(Rc(ctx->opcode) != 0)) {
2997         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2998     }
2999 }
3000 
3001 /* srad & srad. */
3002 static void gen_srad(DisasContext *ctx)
3003 {
3004     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3005                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3006     if (unlikely(Rc(ctx->opcode) != 0)) {
3007         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3008     }
3009 }
3010 /* sradi & sradi. */
3011 static inline void gen_sradi(DisasContext *ctx, int n)
3012 {
3013     int sh = SH(ctx->opcode) + (n << 5);
3014     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3015     TCGv src = cpu_gpr[rS(ctx->opcode)];
3016     if (sh == 0) {
3017         tcg_gen_mov_tl(dst, src);
3018         tcg_gen_movi_tl(cpu_ca, 0);
3019         if (is_isa300(ctx)) {
3020             tcg_gen_movi_tl(cpu_ca32, 0);
3021         }
3022     } else {
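             /* Same CA derivation as srawi, over the full 64-bit register. */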
3023         TCGv t0;
3024         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3025         t0 = tcg_temp_new();
3026         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3027         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3028         tcg_temp_free(t0);
3029         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3030         if (is_isa300(ctx)) {
3031             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3032         }
3033         tcg_gen_sari_tl(dst, src, sh);
3034     }
3035     if (unlikely(Rc(ctx->opcode) != 0)) {
3036         gen_set_Rc0(ctx, dst);
3037     }
3038 }
3039 
3040 static void gen_sradi0(DisasContext *ctx)
3041 {
3042     gen_sradi(ctx, 0);
3043 }
3044 
3045 static void gen_sradi1(DisasContext *ctx)
3046 {
3047     gen_sradi(ctx, 1);
3048 }
3049 
3050 /* extswsli & extswsli. */
3051 static inline void gen_extswsli(DisasContext *ctx, int n)
3052 {
3053     int sh = SH(ctx->opcode) + (n << 5);
3054     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3055     TCGv src = cpu_gpr[rS(ctx->opcode)];
3056 
3057     tcg_gen_ext32s_tl(dst, src);
3058     tcg_gen_shli_tl(dst, dst, sh);
3059     if (unlikely(Rc(ctx->opcode) != 0)) {
3060         gen_set_Rc0(ctx, dst);
3061     }
3062 }
3063 
3064 static void gen_extswsli0(DisasContext *ctx)
3065 {
3066     gen_extswsli(ctx, 0);
3067 }
3068 
3069 static void gen_extswsli1(DisasContext *ctx)
3070 {
3071     gen_extswsli(ctx, 1);
3072 }
3073 
3074 /* srd & srd. */
3075 static void gen_srd(DisasContext *ctx)
3076 {
3077     TCGv t0, t1;
3078 
3079     t0 = tcg_temp_new();
3080     /* AND rS with a mask that is 0 when rB >= 0x40 */
3081     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3082     tcg_gen_sari_tl(t0, t0, 0x3f);
3083     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3084     t1 = tcg_temp_new();
3085     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3086     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3087     tcg_temp_free(t1);
3088     tcg_temp_free(t0);
3089     if (unlikely(Rc(ctx->opcode) != 0)) {
3090         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3091     }
3092 }
3093 #endif
3094 
3095 /***                           Addressing modes                            ***/
3096 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
3097 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3098                                       target_long maskl)
3099 {
3100     target_long simm = SIMM(ctx->opcode);
3101 
3102     simm &= ~maskl;
3103     if (rA(ctx->opcode) == 0) {
3104         if (NARROW_MODE(ctx)) {
3105             simm = (uint32_t)simm;
3106         }
3107         tcg_gen_movi_tl(EA, simm);
3108     } else if (likely(simm != 0)) {
3109         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3110         if (NARROW_MODE(ctx)) {
3111             tcg_gen_ext32u_tl(EA, EA);
3112         }
3113     } else {
3114         if (NARROW_MODE(ctx)) {
3115             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3116         } else {
3117             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3118         }
3119     }
3120 }
3121 
3122 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3123 {
3124     if (rA(ctx->opcode) == 0) {
3125         if (NARROW_MODE(ctx)) {
3126             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3127         } else {
3128             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3129         }
3130     } else {
3131         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3132         if (NARROW_MODE(ctx)) {
3133             tcg_gen_ext32u_tl(EA, EA);
3134         }
3135     }
3136 }
3137 
3138 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3139 {
3140     if (rA(ctx->opcode) == 0) {
3141         tcg_gen_movi_tl(EA, 0);
3142     } else if (NARROW_MODE(ctx)) {
3143         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3144     } else {
3145         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3146     }
3147 }
3148 
3149 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3150                                 target_long val)
3151 {
3152     tcg_gen_addi_tl(ret, arg1, val);
3153     if (NARROW_MODE(ctx)) {
3154         tcg_gen_ext32u_tl(ret, ret);
3155     }
3156 }
3157 
3158 static inline void gen_align_no_le(DisasContext *ctx)
3159 {
3160     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3161                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3162 }
3163 
3164 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3165 {
3166     TCGv ea = tcg_temp_new();
3167     if (ra) {
3168         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3169     } else {
3170         tcg_gen_mov_tl(ea, displ);
3171     }
3172     if (NARROW_MODE(ctx)) {
3173         tcg_gen_ext32u_tl(ea, ea);
3174     }
3175     return ea;
3176 }
3177 
3178 /***                             Integer load                              ***/
3179 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3180 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
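     /*
      * DEF_MEMOP applies the context's default endianness to a memory op;
      * BSWAP_MEMOP flips it, which is what the byte-reversed ("r"/"ur")
      * load/store variants below rely on.
      */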
3181 
3182 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
3183 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
3184                                   TCGv val,                             \
3185                                   TCGv addr)                            \
3186 {                                                                       \
3187     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
3188 }
3189 
3190 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
3191 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3192 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3193 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3194 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3195 
3196 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3197 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3198 
3199 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
3200 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
3201                                              TCGv_i64 val,          \
3202                                              TCGv addr)             \
3203 {                                                                   \
3204     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
3205 }
3206 
3207 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
3208 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3209 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3210 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3211 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
3212 
3213 #if defined(TARGET_PPC64)
3214 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
3215 #endif
3216 
3217 #define GEN_QEMU_STORE_TL(stop, op)                                     \
3218 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
3219                                   TCGv val,                             \
3220                                   TCGv addr)                            \
3221 {                                                                       \
3222     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
3223 }
3224 
3225 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
3226 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
3227 #endif
3228 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3229 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3230 
3231 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3232 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3233 
3234 #define GEN_QEMU_STORE_64(stop, op)                               \
3235 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
3236                                               TCGv_i64 val,       \
3237                                               TCGv addr)          \
3238 {                                                                 \
3239     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
3240 }
3241 
3242 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
3243 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3244 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3245 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
3246 
3247 #if defined(TARGET_PPC64)
3248 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
3249 #endif
3250 
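/*
 * GEN_LDX_E() emits a gen_<name>x() handler for an indexed load: it
 * computes EA = (rA|0) + rB and loads into rD with gen_qemu_<ldop>().
 * The chk argument is the access check: GEN_LDX uses CHK_NONE, while
 * GEN_LDX_HVRM uses CHK_HVRM (hypervisor real mode), as needed by the
 * cache-inhibited variants below.
 */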
3251 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
3252 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3253 {                                                                             \
3254     TCGv EA;                                                                  \
3255     chk;                                                                      \
3256     gen_set_access_type(ctx, ACCESS_INT);                                     \
3257     EA = tcg_temp_new();                                                      \
3258     gen_addr_reg_index(ctx, EA);                                              \
3259     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
3260     tcg_temp_free(EA);                                                        \
3261 }
3262 
3263 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
3264     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3265 
3266 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
3267     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3268 
3269 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
3270 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3271 {                                                                             \
3272     TCGv EA;                                                                  \
3273     CHK_SV;                                                                   \
3274     gen_set_access_type(ctx, ACCESS_INT);                                     \
3275     EA = tcg_temp_new();                                                      \
3276     gen_addr_reg_index(ctx, EA);                                              \
3277     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3278     tcg_temp_free(EA);                                                        \
3279 }
3280 
3281 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3282 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3283 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3284 #if defined(TARGET_PPC64)
3285 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
3286 #endif
3287 
3288 #if defined(TARGET_PPC64)
3289 /* CI load/store variants */
3290 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3291 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3292 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3293 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3294 #endif
3295 
3296 /***                              Integer store                            ***/
3297 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3298 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3299 {                                                                             \
3300     TCGv EA;                                                                  \
3301     chk;                                                                      \
3302     gen_set_access_type(ctx, ACCESS_INT);                                     \
3303     EA = tcg_temp_new();                                                      \
3304     gen_addr_reg_index(ctx, EA);                                              \
3305     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3306     tcg_temp_free(EA);                                                        \
3307 }
3308 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3309     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3310 
3311 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3312     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3313 
3314 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3315 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3316 {                                                                             \
3317     TCGv EA;                                                                  \
3318     CHK_SV;                                                                   \
3319     gen_set_access_type(ctx, ACCESS_INT);                                     \
3320     EA = tcg_temp_new();                                                      \
3321     gen_addr_reg_index(ctx, EA);                                              \
3322     tcg_gen_qemu_st_tl(                                                       \
3323         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3324     tcg_temp_free(EA);                                                        \
3325 }
3326 
3327 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3328 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3329 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3330 #if defined(TARGET_PPC64)
3331 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3332 #endif
3333 
3334 #if defined(TARGET_PPC64)
3335 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3336 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3337 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3338 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3339 #endif
3340 /***                Integer load and store with byte reverse               ***/
3341 
3342 /* lhbrx */
3343 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3344 
3345 /* lwbrx */
3346 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3347 
3348 #if defined(TARGET_PPC64)
3349 /* ldbrx */
3350 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3351 /* stdbrx */
3352 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3353 #endif  /* TARGET_PPC64 */
3354 
3355 /* sthbrx */
3356 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3357 /* stwbrx */
3358 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3359 
3360 /***                    Integer load and store multiple                    ***/
3361 
3362 /* lmw */
3363 static void gen_lmw(DisasContext *ctx)
3364 {
3365     TCGv t0;
3366     TCGv_i32 t1;
3367 
3368     if (ctx->le_mode) {
3369         gen_align_no_le(ctx);
3370         return;
3371     }
3372     gen_set_access_type(ctx, ACCESS_INT);
3373     t0 = tcg_temp_new();
3374     t1 = tcg_const_i32(rD(ctx->opcode));
3375     gen_addr_imm_index(ctx, t0, 0);
3376     gen_helper_lmw(cpu_env, t0, t1);
3377     tcg_temp_free(t0);
3378     tcg_temp_free_i32(t1);
3379 }
3380 
3381 /* stmw */
3382 static void gen_stmw(DisasContext *ctx)
3383 {
3384     TCGv t0;
3385     TCGv_i32 t1;
3386 
3387     if (ctx->le_mode) {
3388         gen_align_no_le(ctx);
3389         return;
3390     }
3391     gen_set_access_type(ctx, ACCESS_INT);
3392     t0 = tcg_temp_new();
3393     t1 = tcg_const_i32(rS(ctx->opcode));
3394     gen_addr_imm_index(ctx, t0, 0);
3395     gen_helper_stmw(cpu_env, t0, t1);
3396     tcg_temp_free(t0);
3397     tcg_temp_free_i32(t1);
3398 }
3399 
3400 /***                    Integer load and store strings                     ***/
3401 
3402 /* lswi */
3403 /*
3404  * The PowerPC32 specification says we must generate an exception if rA is
3405  * in the range of registers to be loaded.  On the other hand, IBM says
3406  * this is valid, but rA won't be loaded.  For now, I'll follow the
3407  * spec...
3408  */
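/*
 * For example, lswi with NB = 7 copies 7 bytes starting at EA = (rA|0)
 * into DIV_ROUND_UP(7, 4) = 2 consecutive registers starting at rD;
 * NB = 0 means 32 bytes, i.e. 8 registers.
 */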
3409 static void gen_lswi(DisasContext *ctx)
3410 {
3411     TCGv t0;
3412     TCGv_i32 t1, t2;
3413     int nb = NB(ctx->opcode);
3414     int start = rD(ctx->opcode);
3415     int ra = rA(ctx->opcode);
3416     int nr;
3417 
3418     if (ctx->le_mode) {
3419         gen_align_no_le(ctx);
3420         return;
3421     }
3422     if (nb == 0) {
3423         nb = 32;
3424     }
3425     nr = DIV_ROUND_UP(nb, 4);
3426     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3427         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3428         return;
3429     }
3430     gen_set_access_type(ctx, ACCESS_INT);
3431     t0 = tcg_temp_new();
3432     gen_addr_register(ctx, t0);
3433     t1 = tcg_const_i32(nb);
3434     t2 = tcg_const_i32(start);
3435     gen_helper_lsw(cpu_env, t0, t1, t2);
3436     tcg_temp_free(t0);
3437     tcg_temp_free_i32(t1);
3438     tcg_temp_free_i32(t2);
3439 }
3440 
3441 /* lswx */
3442 static void gen_lswx(DisasContext *ctx)
3443 {
3444     TCGv t0;
3445     TCGv_i32 t1, t2, t3;
3446 
3447     if (ctx->le_mode) {
3448         gen_align_no_le(ctx);
3449         return;
3450     }
3451     gen_set_access_type(ctx, ACCESS_INT);
3452     t0 = tcg_temp_new();
3453     gen_addr_reg_index(ctx, t0);
3454     t1 = tcg_const_i32(rD(ctx->opcode));
3455     t2 = tcg_const_i32(rA(ctx->opcode));
3456     t3 = tcg_const_i32(rB(ctx->opcode));
3457     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3458     tcg_temp_free(t0);
3459     tcg_temp_free_i32(t1);
3460     tcg_temp_free_i32(t2);
3461     tcg_temp_free_i32(t3);
3462 }
3463 
3464 /* stswi */
3465 static void gen_stswi(DisasContext *ctx)
3466 {
3467     TCGv t0;
3468     TCGv_i32 t1, t2;
3469     int nb = NB(ctx->opcode);
3470 
3471     if (ctx->le_mode) {
3472         gen_align_no_le(ctx);
3473         return;
3474     }
3475     gen_set_access_type(ctx, ACCESS_INT);
3476     t0 = tcg_temp_new();
3477     gen_addr_register(ctx, t0);
3478     if (nb == 0) {
3479         nb = 32;
3480     }
3481     t1 = tcg_const_i32(nb);
3482     t2 = tcg_const_i32(rS(ctx->opcode));
3483     gen_helper_stsw(cpu_env, t0, t1, t2);
3484     tcg_temp_free(t0);
3485     tcg_temp_free_i32(t1);
3486     tcg_temp_free_i32(t2);
3487 }
3488 
3489 /* stswx */
3490 static void gen_stswx(DisasContext *ctx)
3491 {
3492     TCGv t0;
3493     TCGv_i32 t1, t2;
3494 
3495     if (ctx->le_mode) {
3496         gen_align_no_le(ctx);
3497         return;
3498     }
3499     gen_set_access_type(ctx, ACCESS_INT);
3500     t0 = tcg_temp_new();
3501     gen_addr_reg_index(ctx, t0);
3502     t1 = tcg_temp_new_i32();
3503     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3504     tcg_gen_andi_i32(t1, t1, 0x7F);
3505     t2 = tcg_const_i32(rS(ctx->opcode));
3506     gen_helper_stsw(cpu_env, t0, t1, t2);
3507     tcg_temp_free(t0);
3508     tcg_temp_free_i32(t1);
3509     tcg_temp_free_i32(t2);
3510 }
3511 
3512 /***                        Memory synchronisation                         ***/
3513 /* eieio */
3514 static void gen_eieio(DisasContext *ctx)
3515 {
3516     TCGBar bar = TCG_MO_LD_ST;
3517 
3518     /*
3519      * POWER9 has an eieio instruction variant using bit 6 as a hint to
3520      * tell the CPU it is a store-forwarding barrier.
3521      */
3522     if (ctx->opcode & 0x2000000) {
3523         /*
3524          * ISA says that "Reserved fields in instructions are ignored
3525          * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
3526          * since this is not a form software should be using,
3527          * complain to the user.
3528          */
3529         if (!(ctx->insns_flags2 & PPC2_ISA300)) {
3530             qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
3531                           TARGET_FMT_lx "\n", ctx->cia);
3532         } else {
3533             bar = TCG_MO_ST_LD;
3534         }
3535     }
3536 
3537     tcg_gen_mb(bar | TCG_BAR_SC);
3538 }
3539 
3540 #if !defined(CONFIG_USER_ONLY)
3541 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3542 {
3543     TCGv_i32 t;
3544     TCGLabel *l;
3545 
3546     if (!ctx->lazy_tlb_flush) {
3547         return;
3548     }
3549     l = gen_new_label();
3550     t = tcg_temp_new_i32();
3551     tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3552     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3553     if (global) {
3554         gen_helper_check_tlb_flush_global(cpu_env);
3555     } else {
3556         gen_helper_check_tlb_flush_local(cpu_env);
3557     }
3558     gen_set_label(l);
3559     tcg_temp_free_i32(t);
3560 }
3561 #else
3562 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3563 #endif
3564 
3565 /* isync */
3566 static void gen_isync(DisasContext *ctx)
3567 {
3568     /*
3569      * We need to check for a pending TLB flush. This can only happen in
3570      * kernel mode, so check MSR_PR.
3571      */
3572     if (!ctx->pr) {
3573         gen_check_tlb_flush(ctx, false);
3574     }
3575     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
3576     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
3577 }
3578 
3579 #define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
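/*
 * (x & MO_SIZE) is the log2 of the access size in bytes, so e.g.
 * MEMOP_GET_SIZE(MO_UL) == 4 and MEMOP_GET_SIZE(MO_UQ) == 8.
 */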
3580 
3581 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3582 {
3583     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3584     TCGv t0 = tcg_temp_new();
3585 
3586     gen_set_access_type(ctx, ACCESS_RES);
3587     gen_addr_reg_index(ctx, t0);
3588     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3589     tcg_gen_mov_tl(cpu_reserve, t0);
3590     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3591     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3592     tcg_temp_free(t0);
3593 }
3594 
3595 #define LARX(name, memop)                  \
3596 static void gen_##name(DisasContext *ctx)  \
3597 {                                          \
3598     gen_load_locked(ctx, memop);           \
3599 }
3600 
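/*
 * Each LARX(name, memop) expansion defines gen_<name>(), a load-and-reserve
 * handler: the value is loaded with the required alignment and the
 * reservation address and value are recorded in cpu_reserve /
 * cpu_reserve_val for a later store conditional.
 */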
3601 /* lbarx, lharx, lwarx */
3602 LARX(lbarx, DEF_MEMOP(MO_UB))
3603 LARX(lharx, DEF_MEMOP(MO_UW))
3604 LARX(lwarx, DEF_MEMOP(MO_UL))
3605 
3606 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3607                                       TCGv EA, TCGCond cond, int addend)
3608 {
3609     TCGv t = tcg_temp_new();
3610     TCGv t2 = tcg_temp_new();
3611     TCGv u = tcg_temp_new();
3612 
3613     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3614     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3615     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3616     tcg_gen_addi_tl(u, t, addend);
3617 
3618     /* E.g. for fetch and increment bounded... */
3619     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3620     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3621     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3622 
3623     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3624     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3625     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3626 
3627     tcg_temp_free(t);
3628     tcg_temp_free(t2);
3629     tcg_temp_free(u);
3630 }
3631 
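/*
 * lwat/ldat (and stwat/stdat below): the FC field selects the atomic
 * operation. Forms with no single TCG atomic equivalent are emulated
 * inline when not translating for parallel execution, and otherwise
 * restart the TB under the exclusive lock via gen_helper_exit_atomic().
 */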
3632 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
3633 {
3634     uint32_t gpr_FC = FC(ctx->opcode);
3635     TCGv EA = tcg_temp_new();
3636     int rt = rD(ctx->opcode);
3637     bool need_serial;
3638     TCGv src, dst;
3639 
3640     gen_addr_register(ctx, EA);
3641     dst = cpu_gpr[rt];
3642     src = cpu_gpr[(rt + 1) & 31];
3643 
3644     need_serial = false;
3645     memop |= MO_ALIGN;
3646     switch (gpr_FC) {
3647     case 0: /* Fetch and add */
3648         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3649         break;
3650     case 1: /* Fetch and xor */
3651         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3652         break;
3653     case 2: /* Fetch and or */
3654         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3655         break;
3656     case 3: /* Fetch and 'and' */
3657         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3658         break;
3659     case 4:  /* Fetch and max unsigned */
3660         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3661         break;
3662     case 5:  /* Fetch and max signed */
3663         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3664         break;
3665     case 6:  /* Fetch and min unsigned */
3666         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3667         break;
3668     case 7:  /* Fetch and min signed */
3669         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3670         break;
3671     case 8: /* Swap */
3672         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3673         break;
3674 
3675     case 16: /* Compare and swap not equal */
3676         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3677             need_serial = true;
3678         } else {
3679             TCGv t0 = tcg_temp_new();
3680             TCGv t1 = tcg_temp_new();
3681 
3682             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3683             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3684                 tcg_gen_mov_tl(t1, src);
3685             } else {
3686                 tcg_gen_ext32u_tl(t1, src);
3687             }
3688             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3689                                cpu_gpr[(rt + 2) & 31], t0);
3690             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3691             tcg_gen_mov_tl(dst, t0);
3692 
3693             tcg_temp_free(t0);
3694             tcg_temp_free(t1);
3695         }
3696         break;
3697 
3698     case 24: /* Fetch and increment bounded */
3699         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3700             need_serial = true;
3701         } else {
3702             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3703         }
3704         break;
3705     case 25: /* Fetch and increment equal */
3706         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3707             need_serial = true;
3708         } else {
3709             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3710         }
3711         break;
3712     case 28: /* Fetch and decrement bounded */
3713         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3714             need_serial = true;
3715         } else {
3716             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3717         }
3718         break;
3719 
3720     default:
3721         /* invoke data storage error handler */
3722         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3723     }
3724     tcg_temp_free(EA);
3725 
3726     if (need_serial) {
3727         /* Restart with exclusive lock.  */
3728         gen_helper_exit_atomic(cpu_env);
3729         ctx->base.is_jmp = DISAS_NORETURN;
3730     }
3731 }
3732 
3733 static void gen_lwat(DisasContext *ctx)
3734 {
3735     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3736 }
3737 
3738 #ifdef TARGET_PPC64
3739 static void gen_ldat(DisasContext *ctx)
3740 {
3741     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3742 }
3743 #endif
3744 
3745 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3746 {
3747     uint32_t gpr_FC = FC(ctx->opcode);
3748     TCGv EA = tcg_temp_new();
3749     TCGv src, discard;
3750 
3751     gen_addr_register(ctx, EA);
3752     src = cpu_gpr[rD(ctx->opcode)];
3753     discard = tcg_temp_new();
3754 
3755     memop |= MO_ALIGN;
3756     switch (gpr_FC) {
3757     case 0: /* add and Store */
3758         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3759         break;
3760     case 1: /* xor and Store */
3761         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3762         break;
3763     case 2: /* Or and Store */
3764         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3765         break;
3766     case 3: /* 'and' and Store */
3767         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3768         break;
3769     case 4:  /* Store max unsigned */
3770         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3771         break;
3772     case 5:  /* Store max signed */
3773         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3774         break;
3775     case 6:  /* Store min unsigned */
3776         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3777         break;
3778     case 7:  /* Store min signed */
3779         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3780         break;
3781     case 24: /* Store twin  */
3782         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3783             /* Restart with exclusive lock.  */
3784             gen_helper_exit_atomic(cpu_env);
3785             ctx->base.is_jmp = DISAS_NORETURN;
3786         } else {
3787             TCGv t = tcg_temp_new();
3788             TCGv t2 = tcg_temp_new();
3789             TCGv s = tcg_temp_new();
3790             TCGv s2 = tcg_temp_new();
3791             TCGv ea_plus_s = tcg_temp_new();
3792 
3793             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3794             tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
3795             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3796             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3797             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3798             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3799             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3800 
3801             tcg_temp_free(ea_plus_s);
3802             tcg_temp_free(s2);
3803             tcg_temp_free(s);
3804             tcg_temp_free(t2);
3805             tcg_temp_free(t);
3806         }
3807         break;
3808     default:
3809         /* invoke data storage error handler */
3810         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3811     }
3812     tcg_temp_free(discard);
3813     tcg_temp_free(EA);
3814 }
3815 
3816 static void gen_stwat(DisasContext *ctx)
3817 {
3818     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3819 }
3820 
3821 #ifdef TARGET_PPC64
3822 static void gen_stdat(DisasContext *ctx)
3823 {
3824     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3825 }
3826 #endif
3827 
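/*
 * st{b,h,w}cx. (and stdcx. below): the store succeeds only if EA still
 * matches the reservation address; the cmpxchg against cpu_reserve_val
 * detects an intervening store.  CR0[EQ] is set on success, CR0[SO] is
 * copied from XER, and the reservation is cleared by writing -1 to
 * cpu_reserve.
 */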
3828 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3829 {
3830     TCGLabel *l1 = gen_new_label();
3831     TCGLabel *l2 = gen_new_label();
3832     TCGv t0 = tcg_temp_new();
3833     int reg = rS(ctx->opcode);
3834 
3835     gen_set_access_type(ctx, ACCESS_RES);
3836     gen_addr_reg_index(ctx, t0);
3837     tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3838     tcg_temp_free(t0);
3839 
3840     t0 = tcg_temp_new();
3841     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3842                               cpu_gpr[reg], ctx->mem_idx,
3843                               DEF_MEMOP(memop) | MO_ALIGN);
3844     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3845     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3846     tcg_gen_or_tl(t0, t0, cpu_so);
3847     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3848     tcg_temp_free(t0);
3849     tcg_gen_br(l2);
3850 
3851     gen_set_label(l1);
3852 
3853     /*
3854      * Address mismatch implies failure.  But we still need to provide
3855      * the memory barrier semantics of the instruction.
3856      */
3857     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3858     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3859 
3860     gen_set_label(l2);
3861     tcg_gen_movi_tl(cpu_reserve, -1);
3862 }
3863 
3864 #define STCX(name, memop)                  \
3865 static void gen_##name(DisasContext *ctx)  \
3866 {                                          \
3867     gen_conditional_store(ctx, memop);     \
3868 }
3869 
3870 STCX(stbcx_, DEF_MEMOP(MO_UB))
3871 STCX(sthcx_, DEF_MEMOP(MO_UW))
3872 STCX(stwcx_, DEF_MEMOP(MO_UL))
3873 
3874 #if defined(TARGET_PPC64)
3875 /* ldarx */
3876 LARX(ldarx, DEF_MEMOP(MO_UQ))
3877 /* stdcx. */
3878 STCX(stdcx_, DEF_MEMOP(MO_UQ))
3879 
3880 /* lqarx */
3881 static void gen_lqarx(DisasContext *ctx)
3882 {
3883     int rd = rD(ctx->opcode);
3884     TCGv EA, hi, lo;
3885 
3886     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3887                  (rd == rB(ctx->opcode)))) {
3888         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3889         return;
3890     }
3891 
3892     gen_set_access_type(ctx, ACCESS_RES);
3893     EA = tcg_temp_new();
3894     gen_addr_reg_index(ctx, EA);
3895 
3896     /* Note that the low part is always in RD+1, even in LE mode.  */
3897     lo = cpu_gpr[rd + 1];
3898     hi = cpu_gpr[rd];
3899 
3900     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3901         if (HAVE_ATOMIC128) {
3902             TCGv_i32 oi = tcg_temp_new_i32();
3903             if (ctx->le_mode) {
3904                 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
3905                                                     ctx->mem_idx));
3906                 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3907             } else {
3908                 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
3909                                                     ctx->mem_idx));
3910                 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3911             }
3912             tcg_temp_free_i32(oi);
3913             tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3914         } else {
3915             /* Restart with exclusive lock.  */
3916             gen_helper_exit_atomic(cpu_env);
3917             ctx->base.is_jmp = DISAS_NORETURN;
3918             tcg_temp_free(EA);
3919             return;
3920         }
3921     } else if (ctx->le_mode) {
3922         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
3923         tcg_gen_mov_tl(cpu_reserve, EA);
3924         gen_addr_add(ctx, EA, EA, 8);
3925         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
3926     } else {
3927         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
3928         tcg_gen_mov_tl(cpu_reserve, EA);
3929         gen_addr_add(ctx, EA, EA, 8);
3930         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
3931     }
3932     tcg_temp_free(EA);
3933 
3934     tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
3935     tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
3936 }
3937 
3938 /* stqcx. */
3939 static void gen_stqcx_(DisasContext *ctx)
3940 {
3941     int rs = rS(ctx->opcode);
3942     TCGv EA, hi, lo;
3943 
3944     if (unlikely(rs & 1)) {
3945         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3946         return;
3947     }
3948 
3949     gen_set_access_type(ctx, ACCESS_RES);
3950     EA = tcg_temp_new();
3951     gen_addr_reg_index(ctx, EA);
3952 
3953     /* Note that the low part is always in RS+1, even in LE mode.  */
3954     lo = cpu_gpr[rs + 1];
3955     hi = cpu_gpr[rs];
3956 
3957     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3958         if (HAVE_CMPXCHG128) {
3959             TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
3960             if (ctx->le_mode) {
3961                 gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
3962                                              EA, lo, hi, oi);
3963             } else {
3964                 gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
3965                                              EA, lo, hi, oi);
3966             }
3967             tcg_temp_free_i32(oi);
3968         } else {
3969             /* Restart with exclusive lock.  */
3970             gen_helper_exit_atomic(cpu_env);
3971             ctx->base.is_jmp = DISAS_NORETURN;
3972         }
3973         tcg_temp_free(EA);
3974     } else {
3975         TCGLabel *lab_fail = gen_new_label();
3976         TCGLabel *lab_over = gen_new_label();
3977         TCGv_i64 t0 = tcg_temp_new_i64();
3978         TCGv_i64 t1 = tcg_temp_new_i64();
3979 
3980         tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
3981         tcg_temp_free(EA);
3982 
3983         gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
3984         tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
3985                                      ? offsetof(CPUPPCState, reserve_val2)
3986                                      : offsetof(CPUPPCState, reserve_val)));
3987         tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
3988 
3989         tcg_gen_addi_i64(t0, cpu_reserve, 8);
3990         gen_qemu_ld64_i64(ctx, t0, t0);
3991         tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
3992                                      ? offsetof(CPUPPCState, reserve_val)
3993                                      : offsetof(CPUPPCState, reserve_val2)));
3994         tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);
3995 
3996         /* Success */
3997         gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
3998         tcg_gen_addi_i64(t0, cpu_reserve, 8);
3999         gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);
4000 
4001         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4002         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
4003         tcg_gen_br(lab_over);
4004 
4005         gen_set_label(lab_fail);
4006         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4007 
4008         gen_set_label(lab_over);
4009         tcg_gen_movi_tl(cpu_reserve, -1);
4010         tcg_temp_free_i64(t0);
4011         tcg_temp_free_i64(t1);
4012     }
4013 }
4014 #endif /* defined(TARGET_PPC64) */
4015 
4016 /* sync */
4017 static void gen_sync(DisasContext *ctx)
4018 {
4019     uint32_t l = (ctx->opcode >> 21) & 3;
4020 
4021     /*
4022      * We may need to check for a pending TLB flush.
4023      *
4024      * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
4025      *
4026      * Additionally, this can only happen in kernel mode, so check
4027      * MSR_PR as well.
4028      */
4029     if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4030         gen_check_tlb_flush(ctx, true);
4031     }
4032     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
4033 }
4034 
4035 /* wait */
4036 static void gen_wait(DisasContext *ctx)
4037 {
4038     TCGv_i32 t0 = tcg_const_i32(1);
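    /*
     * The negative offsetof() rebases cpu_env from the env field back to
     * the start of the CPU object, so this effectively sets cs->halted = 1.
     */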
4039     tcg_gen_st_i32(t0, cpu_env,
4040                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4041     tcg_temp_free_i32(t0);
4042     /* Stop translation, as the CPU is supposed to sleep from now on */
4043     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4044 }
4045 
4046 #if defined(TARGET_PPC64)
4047 static void gen_doze(DisasContext *ctx)
4048 {
4049 #if defined(CONFIG_USER_ONLY)
4050     GEN_PRIV;
4051 #else
4052     TCGv_i32 t;
4053 
4054     CHK_HV;
4055     t = tcg_const_i32(PPC_PM_DOZE);
4056     gen_helper_pminsn(cpu_env, t);
4057     tcg_temp_free_i32(t);
4058     /* Stop translation, as the CPU is supposed to sleep from now on */
4059     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4060 #endif /* defined(CONFIG_USER_ONLY) */
4061 }
4062 
4063 static void gen_nap(DisasContext *ctx)
4064 {
4065 #if defined(CONFIG_USER_ONLY)
4066     GEN_PRIV;
4067 #else
4068     TCGv_i32 t;
4069 
4070     CHK_HV;
4071     t = tcg_const_i32(PPC_PM_NAP);
4072     gen_helper_pminsn(cpu_env, t);
4073     tcg_temp_free_i32(t);
4074     /* Stop translation, as the CPU is supposed to sleep from now on */
4075     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4076 #endif /* defined(CONFIG_USER_ONLY) */
4077 }
4078 
4079 static void gen_stop(DisasContext *ctx)
4080 {
4081 #if defined(CONFIG_USER_ONLY)
4082     GEN_PRIV;
4083 #else
4084     TCGv_i32 t;
4085 
4086     CHK_HV;
4087     t = tcg_const_i32(PPC_PM_STOP);
4088     gen_helper_pminsn(cpu_env, t);
4089     tcg_temp_free_i32(t);
4090     /* Stop translation, as the CPU is supposed to sleep from now on */
4091     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4092 #endif /* defined(CONFIG_USER_ONLY) */
4093 }
4094 
4095 static void gen_sleep(DisasContext *ctx)
4096 {
4097 #if defined(CONFIG_USER_ONLY)
4098     GEN_PRIV;
4099 #else
4100     TCGv_i32 t;
4101 
4102     CHK_HV;
4103     t = tcg_const_i32(PPC_PM_SLEEP);
4104     gen_helper_pminsn(cpu_env, t);
4105     tcg_temp_free_i32(t);
4106     /* Stop translation, as the CPU is supposed to sleep from now on */
4107     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4108 #endif /* defined(CONFIG_USER_ONLY) */
4109 }
4110 
4111 static void gen_rvwinkle(DisasContext *ctx)
4112 {
4113 #if defined(CONFIG_USER_ONLY)
4114     GEN_PRIV;
4115 #else
4116     TCGv_i32 t;
4117 
4118     CHK_HV;
4119     t = tcg_const_i32(PPC_PM_RVWINKLE);
4120     gen_helper_pminsn(cpu_env, t);
4121     tcg_temp_free_i32(t);
4122     /* Stop translation, as the CPU is supposed to sleep from now on */
4123     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4124 #endif /* defined(CONFIG_USER_ONLY) */
4125 }
4126 #endif /* #if defined(TARGET_PPC64) */
4127 
4128 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4129 {
4130 #if defined(TARGET_PPC64)
4131     if (ctx->has_cfar) {
4132         tcg_gen_movi_tl(cpu_cfar, nip);
4133     }
4134 #endif
4135 }
4136 
4137 #if defined(TARGET_PPC64)
4138 static void pmu_count_insns(DisasContext *ctx)
4139 {
4140     /*
4141      * Do not bother calling the helper if the PMU isn't counting
4142      * instructions.
4143      */
4144     if (!ctx->pmu_insn_cnt) {
4145         return;
4146     }
4147 
4148  #if !defined(CONFIG_USER_ONLY)
4149     /*
4150      * The PMU insns_inc() helper stops the internal PMU timer if a
4151      * counter overflow happens. In that case, if the guest is
4152      * running with icount and we do not handle it beforehand,
4153      * the helper can trigger a 'bad icount read'.
4154      */
4155     gen_icount_io_start(ctx);
4156 
4157     gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
4158 #else
4159     /*
4160      * User mode can read (but not write) PMC5 and start/stop
4161      * the PMU via MMCR0_FC. In this case just increment
4162      * PMC5 with base.num_insns.
4163      */
4164     TCGv t0 = tcg_temp_new();
4165 
4166     gen_load_spr(t0, SPR_POWER_PMC5);
4167     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4168     gen_store_spr(SPR_POWER_PMC5, t0);
4169 
4170     tcg_temp_free(t0);
4171 #endif /* #if !defined(CONFIG_USER_ONLY) */
4172 }
4173 #else
4174 static void pmu_count_insns(DisasContext *ctx)
4175 {
4176     return;
4177 }
4178 #endif /* #if defined(TARGET_PPC64) */
4179 
4180 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4181 {
4182     return translator_use_goto_tb(&ctx->base, dest);
4183 }
4184 
4185 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4186 {
4187     if (unlikely(ctx->singlestep_enabled)) {
4188         gen_debug_exception(ctx);
4189     } else {
4190         /*
4191          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4192          * CF_NO_GOTO_PTR is set. Count insns now.
4193          */
4194         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4195             pmu_count_insns(ctx);
4196         }
4197 
4198         tcg_gen_lookup_and_goto_ptr();
4199     }
4200 }
4201 
4202 /***                                Branch                                 ***/
4203 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4204 {
4205     if (NARROW_MODE(ctx)) {
4206         dest = (uint32_t) dest;
4207     }
4208     if (use_goto_tb(ctx, dest)) {
4209         pmu_count_insns(ctx);
4210         tcg_gen_goto_tb(n);
4211         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4212         tcg_gen_exit_tb(ctx->base.tb, n);
4213     } else {
4214         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4215         gen_lookup_and_goto_ptr(ctx);
4216     }
4217 }
4218 
4219 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4220 {
4221     if (NARROW_MODE(ctx)) {
4222         nip = (uint32_t)nip;
4223     }
4224     tcg_gen_movi_tl(cpu_lr, nip);
4225 }
4226 
4227 /* b ba bl bla */
4228 static void gen_b(DisasContext *ctx)
4229 {
4230     target_ulong li, target;
4231 
4232     /* sign extend LI */
4233     li = LI(ctx->opcode);
4234     li = (li ^ 0x02000000) - 0x02000000;
4235     if (likely(AA(ctx->opcode) == 0)) {
4236         target = ctx->cia + li;
4237     } else {
4238         target = li;
4239     }
4240     if (LK(ctx->opcode)) {
4241         gen_setlr(ctx, ctx->base.pc_next);
4242     }
4243     gen_update_cfar(ctx, ctx->cia);
4244     gen_goto_tb(ctx, 0, target);
4245     ctx->base.is_jmp = DISAS_NORETURN;
4246 }
4247 
4248 #define BCOND_IM  0
4249 #define BCOND_LR  1
4250 #define BCOND_CTR 2
4251 #define BCOND_TAR 3
4252 
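/*
 * Conditional branch, with the BO field decoded as in the ISA:
 *   (bo & 0x04) == 0    decrement CTR and test it; bo & 0x02 selects
 *                       "branch if CTR == 0" vs. "branch if CTR != 0",
 *   (bo & 0x10) == 0    test the CR bit selected by BI; bo & 0x08 selects
 *                       the value the bit must have,
 *   (bo & 0x14) == 0x14 branch always, for which no fallthrough path is
 *                       generated.
 */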
4253 static void gen_bcond(DisasContext *ctx, int type)
4254 {
4255     uint32_t bo = BO(ctx->opcode);
4256     TCGLabel *l1;
4257     TCGv target;
4258 
4259     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4260         target = tcg_temp_local_new();
4261         if (type == BCOND_CTR) {
4262             tcg_gen_mov_tl(target, cpu_ctr);
4263         } else if (type == BCOND_TAR) {
4264             gen_load_spr(target, SPR_TAR);
4265         } else {
4266             tcg_gen_mov_tl(target, cpu_lr);
4267         }
4268     } else {
4269         target = NULL;
4270     }
4271     if (LK(ctx->opcode)) {
4272         gen_setlr(ctx, ctx->base.pc_next);
4273     }
4274     l1 = gen_new_label();
4275     if ((bo & 0x4) == 0) {
4276         /* Decrement and test CTR */
4277         TCGv temp = tcg_temp_new();
4278 
4279         if (type == BCOND_CTR) {
4280             /*
4281              * All ISAs up to v3 describe this form of bcctr as invalid, but
4282              * some processors, i.e. 64-bit server processors compliant with
4283              * arch 2.x, do implement a "test and decrement" logic instead,
4284              * as described in their respective UMs. This logic requires CTR
4285              * to act as both the branch target and a counter, which makes
4286              * it basically useless and thus never used in real code.
4287              *
4288              * This form was hence chosen to trigger an extra micro-architectural
4289              * side effect on real HW, needed for the Spectre v2 workaround.
4290              * It is up to guests that implement such a workaround, i.e. Linux,
4291              * to use this form in a way that just triggers the side effect
4292              * without doing anything else harmful.
4293              */
4294             if (unlikely(!is_book3s_arch2x(ctx))) {
4295                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4296                 tcg_temp_free(temp);
4297                 tcg_temp_free(target);
4298                 return;
4299             }
4300 
4301             if (NARROW_MODE(ctx)) {
4302                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4303             } else {
4304                 tcg_gen_mov_tl(temp, cpu_ctr);
4305             }
4306             if (bo & 0x2) {
4307                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4308             } else {
4309                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4310             }
4311             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4312         } else {
4313             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4314             if (NARROW_MODE(ctx)) {
4315                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4316             } else {
4317                 tcg_gen_mov_tl(temp, cpu_ctr);
4318             }
4319             if (bo & 0x2) {
4320                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4321             } else {
4322                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4323             }
4324         }
4325         tcg_temp_free(temp);
4326     }
4327     if ((bo & 0x10) == 0) {
4328         /* Test CR */
4329         uint32_t bi = BI(ctx->opcode);
4330         uint32_t mask = 0x08 >> (bi & 0x03);
4331         TCGv_i32 temp = tcg_temp_new_i32();
4332 
4333         if (bo & 0x8) {
4334             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4335             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4336         } else {
4337             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4338             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4339         }
4340         tcg_temp_free_i32(temp);
4341     }
4342     gen_update_cfar(ctx, ctx->cia);
4343     if (type == BCOND_IM) {
4344         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4345         if (likely(AA(ctx->opcode) == 0)) {
4346             gen_goto_tb(ctx, 0, ctx->cia + li);
4347         } else {
4348             gen_goto_tb(ctx, 0, li);
4349         }
4350     } else {
4351         if (NARROW_MODE(ctx)) {
4352             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4353         } else {
4354             tcg_gen_andi_tl(cpu_nip, target, ~3);
4355         }
4356         gen_lookup_and_goto_ptr(ctx);
4357         tcg_temp_free(target);
4358     }
4359     if ((bo & 0x14) != 0x14) {
4360         /* fallthrough case */
4361         gen_set_label(l1);
4362         gen_goto_tb(ctx, 1, ctx->base.pc_next);
4363     }
4364     ctx->base.is_jmp = DISAS_NORETURN;
4365 }
4366 
4367 static void gen_bc(DisasContext *ctx)
4368 {
4369     gen_bcond(ctx, BCOND_IM);
4370 }
4371 
4372 static void gen_bcctr(DisasContext *ctx)
4373 {
4374     gen_bcond(ctx, BCOND_CTR);
4375 }
4376 
4377 static void gen_bclr(DisasContext *ctx)
4378 {
4379     gen_bcond(ctx, BCOND_LR);
4380 }
4381 
4382 static void gen_bctar(DisasContext *ctx)
4383 {
4384     gen_bcond(ctx, BCOND_TAR);
4385 }
4386 
4387 /***                      Condition register logical                       ***/
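/*
 * Each cpu_crf[] global holds one 4-bit CR field.  GEN_CRLOGIC() shifts
 * the source fields so that the crbA and crbB bits line up with the crbD
 * bit position, applies tcg_op, then merges the single result bit
 * (bitmask) back into the destination field.
 */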
4388 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
4389 static void glue(gen_, name)(DisasContext *ctx)                               \
4390 {                                                                             \
4391     uint8_t bitmask;                                                          \
4392     int sh;                                                                   \
4393     TCGv_i32 t0, t1;                                                          \
4394     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
4395     t0 = tcg_temp_new_i32();                                                  \
4396     if (sh > 0)                                                               \
4397         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
4398     else if (sh < 0)                                                          \
4399         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
4400     else                                                                      \
4401         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
4402     t1 = tcg_temp_new_i32();                                                  \
4403     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
4404     if (sh > 0)                                                               \
4405         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
4406     else if (sh < 0)                                                          \
4407         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
4408     else                                                                      \
4409         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
4410     tcg_op(t0, t0, t1);                                                       \
4411     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
4412     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
4413     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
4414     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
4415     tcg_temp_free_i32(t0);                                                    \
4416     tcg_temp_free_i32(t1);                                                    \
4417 }
4418 
4419 /* crand */
4420 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4421 /* crandc */
4422 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4423 /* creqv */
4424 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4425 /* crnand */
4426 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4427 /* crnor */
4428 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4429 /* cror */
4430 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4431 /* crorc */
4432 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4433 /* crxor */
4434 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4435 
4436 /* mcrf */
4437 static void gen_mcrf(DisasContext *ctx)
4438 {
4439     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4440 }
4441 
4442 /***                           System linkage                              ***/
4443 
4444 /* rfi (supervisor only) */
4445 static void gen_rfi(DisasContext *ctx)
4446 {
4447 #if defined(CONFIG_USER_ONLY)
4448     GEN_PRIV;
4449 #else
4450     /*
4451      * This instruction doesn't exist anymore on 64-bit server
4452      * processors compliant with arch 2.x
4453      */
4454     if (is_book3s_arch2x(ctx)) {
4455         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4456         return;
4457     }
4458     /* Restore CPU state */
4459     CHK_SV;
4460     gen_icount_io_start(ctx);
4461     gen_update_cfar(ctx, ctx->cia);
4462     gen_helper_rfi(cpu_env);
4463     ctx->base.is_jmp = DISAS_EXIT;
4464 #endif
4465 }
4466 
4467 #if defined(TARGET_PPC64)
4468 static void gen_rfid(DisasContext *ctx)
4469 {
4470 #if defined(CONFIG_USER_ONLY)
4471     GEN_PRIV;
4472 #else
4473     /* Restore CPU state */
4474     CHK_SV;
4475     gen_icount_io_start(ctx);
4476     gen_update_cfar(ctx, ctx->cia);
4477     gen_helper_rfid(cpu_env);
4478     ctx->base.is_jmp = DISAS_EXIT;
4479 #endif
4480 }
4481 
4482 #if !defined(CONFIG_USER_ONLY)
4483 static void gen_rfscv(DisasContext *ctx)
4484 {
4485 #if defined(CONFIG_USER_ONLY)
4486     GEN_PRIV;
4487 #else
4488     /* Restore CPU state */
4489     CHK_SV;
4490     gen_icount_io_start(ctx);
4491     gen_update_cfar(ctx, ctx->cia);
4492     gen_helper_rfscv(cpu_env);
4493     ctx->base.is_jmp = DISAS_EXIT;
4494 #endif
4495 }
4496 #endif
4497 
4498 static void gen_hrfid(DisasContext *ctx)
4499 {
4500 #if defined(CONFIG_USER_ONLY)
4501     GEN_PRIV;
4502 #else
4503     /* Restore CPU state */
4504     CHK_HV;
4505     gen_helper_hrfid(cpu_env);
4506     ctx->base.is_jmp = DISAS_EXIT;
4507 #endif
4508 }
4509 #endif
4510 
4511 /* sc */
4512 #if defined(CONFIG_USER_ONLY)
4513 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4514 #else
4515 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4516 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4517 #endif
4518 static void gen_sc(DisasContext *ctx)
4519 {
4520     uint32_t lev;
4521 
4522     lev = (ctx->opcode >> 5) & 0x7F;
4523     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4524 }
4525 
4526 #if defined(TARGET_PPC64)
4527 #if !defined(CONFIG_USER_ONLY)
4528 static void gen_scv(DisasContext *ctx)
4529 {
4530     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4531 
4532     /* Set the PC back to the faulting instruction. */
4533     gen_update_nip(ctx, ctx->cia);
4534     gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4535 
4536     ctx->base.is_jmp = DISAS_NORETURN;
4537 }
4538 #endif
4539 #endif
4540 
4541 /***                                Trap                                   ***/
4542 
4543 /* Check for unconditional traps (always or never) */
4544 static bool check_unconditional_trap(DisasContext *ctx)
4545 {
4546     /* Trap never */
4547     if (TO(ctx->opcode) == 0) {
4548         return true;
4549     }
4550     /* Trap always */
4551     if (TO(ctx->opcode) == 31) {
4552         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4553         return true;
4554     }
4555     return false;
4556 }
4557 
4558 /* tw */
4559 static void gen_tw(DisasContext *ctx)
4560 {
4561     TCGv_i32 t0;
4562 
4563     if (check_unconditional_trap(ctx)) {
4564         return;
4565     }
4566     t0 = tcg_const_i32(TO(ctx->opcode));
4567     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4568                   t0);
4569     tcg_temp_free_i32(t0);
4570 }
4571 
4572 /* twi */
4573 static void gen_twi(DisasContext *ctx)
4574 {
4575     TCGv t0;
4576     TCGv_i32 t1;
4577 
4578     if (check_unconditional_trap(ctx)) {
4579         return;
4580     }
4581     t0 = tcg_const_tl(SIMM(ctx->opcode));
4582     t1 = tcg_const_i32(TO(ctx->opcode));
4583     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4584     tcg_temp_free(t0);
4585     tcg_temp_free_i32(t1);
4586 }
4587 
4588 #if defined(TARGET_PPC64)
4589 /* td */
4590 static void gen_td(DisasContext *ctx)
4591 {
4592     TCGv_i32 t0;
4593 
4594     if (check_unconditional_trap(ctx)) {
4595         return;
4596     }
4597     t0 = tcg_const_i32(TO(ctx->opcode));
4598     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4599                   t0);
4600     tcg_temp_free_i32(t0);
4601 }
4602 
4603 /* tdi */
4604 static void gen_tdi(DisasContext *ctx)
4605 {
4606     TCGv t0;
4607     TCGv_i32 t1;
4608 
4609     if (check_unconditional_trap(ctx)) {
4610         return;
4611     }
4612     t0 = tcg_const_tl(SIMM(ctx->opcode));
4613     t1 = tcg_const_i32(TO(ctx->opcode));
4614     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4615     tcg_temp_free(t0);
4616     tcg_temp_free_i32(t1);
4617 }
4618 #endif
4619 
4620 /***                          Processor control                            ***/
4621 
4622 /* mcrxr */
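/*
 * mcrxr copies XER[SO, OV, CA] into CR field crfD as SO:OV:CA:0 and then
 * clears those XER bits.
 */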
4623 static void gen_mcrxr(DisasContext *ctx)
4624 {
4625     TCGv_i32 t0 = tcg_temp_new_i32();
4626     TCGv_i32 t1 = tcg_temp_new_i32();
4627     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4628 
4629     tcg_gen_trunc_tl_i32(t0, cpu_so);
4630     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4631     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4632     tcg_gen_shli_i32(t0, t0, 3);
4633     tcg_gen_shli_i32(t1, t1, 2);
4634     tcg_gen_shli_i32(dst, dst, 1);
4635     tcg_gen_or_i32(dst, dst, t0);
4636     tcg_gen_or_i32(dst, dst, t1);
4637     tcg_temp_free_i32(t0);
4638     tcg_temp_free_i32(t1);
4639 
4640     tcg_gen_movi_tl(cpu_so, 0);
4641     tcg_gen_movi_tl(cpu_ov, 0);
4642     tcg_gen_movi_tl(cpu_ca, 0);
4643 }
4644 
4645 #ifdef TARGET_PPC64
4646 /* mcrxrx */
4647 static void gen_mcrxrx(DisasContext *ctx)
4648 {
4649     TCGv t0 = tcg_temp_new();
4650     TCGv t1 = tcg_temp_new();
4651     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4652 
4653     /* copy OV and OV32 */
4654     tcg_gen_shli_tl(t0, cpu_ov, 1);
4655     tcg_gen_or_tl(t0, t0, cpu_ov32);
4656     tcg_gen_shli_tl(t0, t0, 2);
4657     /* copy CA and CA32 */
4658     tcg_gen_shli_tl(t1, cpu_ca, 1);
4659     tcg_gen_or_tl(t1, t1, cpu_ca32);
4660     tcg_gen_or_tl(t0, t0, t1);
4661     tcg_gen_trunc_tl_i32(dst, t0);
4662     tcg_temp_free(t0);
4663     tcg_temp_free(t1);
4664 }
4665 #endif
4666 
4667 /* mfcr mfocrf */
4668 static void gen_mfcr(DisasContext *ctx)
4669 {
4670     uint32_t crm, crn;
4671 
4672     if (likely(ctx->opcode & 0x00100000)) {
4673         crm = CRM(ctx->opcode);
4674         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4675             crn = ctz32(crm);
4676             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4677             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4678                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4679         }
4680     } else {
4681         TCGv_i32 t0 = tcg_temp_new_i32();
4682         tcg_gen_mov_i32(t0, cpu_crf[0]);
4683         tcg_gen_shli_i32(t0, t0, 4);
4684         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4685         tcg_gen_shli_i32(t0, t0, 4);
4686         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4687         tcg_gen_shli_i32(t0, t0, 4);
4688         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4689         tcg_gen_shli_i32(t0, t0, 4);
4690         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4691         tcg_gen_shli_i32(t0, t0, 4);
4692         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4693         tcg_gen_shli_i32(t0, t0, 4);
4694         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4695         tcg_gen_shli_i32(t0, t0, 4);
4696         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4697         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4698         tcg_temp_free_i32(t0);
4699     }
4700 }
4701 
4702 /* mfmsr */
4703 static void gen_mfmsr(DisasContext *ctx)
4704 {
4705     CHK_SV;
4706     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4707 }
4708 
4709 /* mfspr */
4710 static inline void gen_op_mfspr(DisasContext *ctx)
4711 {
4712     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4713     uint32_t sprn = SPR(ctx->opcode);
4714 
4715 #if defined(CONFIG_USER_ONLY)
4716     read_cb = ctx->spr_cb[sprn].uea_read;
4717 #else
4718     if (ctx->pr) {
4719         read_cb = ctx->spr_cb[sprn].uea_read;
4720     } else if (ctx->hv) {
4721         read_cb = ctx->spr_cb[sprn].hea_read;
4722     } else {
4723         read_cb = ctx->spr_cb[sprn].oea_read;
4724     }
4725 #endif
4726     if (likely(read_cb != NULL)) {
4727         if (likely(read_cb != SPR_NOACCESS)) {
4728             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4729         } else {
4730             /* Privilege exception */
4731             /*
4732              * This is a hack to avoid warnings when running Linux:
4733              * this OS breaks the PowerPC virtualisation model,
4734              * allowing userland applications to read the PVR
4735              */
4736             if (sprn != SPR_PVR) {
4737                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4738                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4739                               ctx->cia);
4740             }
4741             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4742         }
4743     } else {
4744         /* ISA 2.07 defines these as no-ops */
4745         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4746             (sprn >= 808 && sprn <= 811)) {
4747             /* This is a nop */
4748             return;
4749         }
4750         /* Not defined */
4751         qemu_log_mask(LOG_GUEST_ERROR,
4752                       "Trying to read invalid spr %d (0x%03x) at "
4753                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4754 
4755         /*
4756          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
4757          * generate a priv exception, an hv emu exception, or a no-op.
4758          */
4759         if (sprn & 0x10) {
4760             if (ctx->pr) {
4761                 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4762             }
4763         } else {
4764             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4765                 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4766             }
4767         }
4768     }
4769 }
4770 
4771 static void gen_mfspr(DisasContext *ctx)
4772 {
4773     gen_op_mfspr(ctx);
4774 }
4775 
4776 /* mftb */
4777 static void gen_mftb(DisasContext *ctx)
4778 {
4779     gen_op_mfspr(ctx);
4780 }
4781 
4782 /* mtcrf mtocrf */
4783 static void gen_mtcrf(DisasContext *ctx)
4784 {
4785     uint32_t crm, crn;
4786 
4787     crm = CRM(ctx->opcode);
4788     if (likely((ctx->opcode & 0x00100000))) {
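             /*
              * mtocrf: when CRM has exactly one bit set, update only the
              * selected CR field from the matching nibble of rS.
              */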
4789         if (crm && ((crm & (crm - 1)) == 0)) {
4790             TCGv_i32 temp = tcg_temp_new_i32();
4791             crn = ctz32(crm);
4792             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4793             tcg_gen_shri_i32(temp, temp, crn * 4);
4794             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4795             tcg_temp_free_i32(temp);
4796         }
4797     } else {
4798         TCGv_i32 temp = tcg_temp_new_i32();
4799         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4800         for (crn = 0 ; crn < 8 ; crn++) {
4801             if (crm & (1 << crn)) {
4802                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4803                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4804             }
4805         }
4806         tcg_temp_free_i32(temp);
4807     }
4808 }
4809 
4810 /* mtmsr */
4811 #if defined(TARGET_PPC64)
4812 static void gen_mtmsrd(DisasContext *ctx)
4813 {
4814     if (unlikely(!is_book3s_arch2x(ctx))) {
4815         gen_invalid(ctx);
4816         return;
4817     }
4818 
4819     CHK_SV;
4820 
4821 #if !defined(CONFIG_USER_ONLY)
4822     TCGv t0, t1;
4823     target_ulong mask;
4824 
4825     t0 = tcg_temp_new();
4826     t1 = tcg_temp_new();
4827 
4828     gen_icount_io_start(ctx);
4829 
4830     if (ctx->opcode & 0x00010000) {
4831         /* L=1 form only updates EE and RI */
4832         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4833     } else {
4834         /* mtmsrd does not alter HV, S, ME, or LE */
4835         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4836                  (1ULL << MSR_HV));
4837         /*
4838          * XXX: we need to update nip before the store if we enter
4839          *      power saving mode, we will exit the loop directly from
4840          *      ppc_store_msr
4841          */
4842         gen_update_nip(ctx, ctx->base.pc_next);
4843     }
4844 
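         /* New MSR value = (rS & mask) | (current MSR & ~mask). */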
4845     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4846     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4847     tcg_gen_or_tl(t0, t0, t1);
4848 
4849     gen_helper_store_msr(cpu_env, t0);
4850 
4851     /* Must stop the translation as machine state (may have) changed */
4852     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4853 
4854     tcg_temp_free(t0);
4855     tcg_temp_free(t1);
4856 #endif /* !defined(CONFIG_USER_ONLY) */
4857 }
4858 #endif /* defined(TARGET_PPC64) */
4859 
4860 static void gen_mtmsr(DisasContext *ctx)
4861 {
4862     CHK_SV;
4863 
4864 #if !defined(CONFIG_USER_ONLY)
4865     TCGv t0, t1;
4866     target_ulong mask = 0xFFFFFFFF;
4867 
4868     t0 = tcg_temp_new();
4869     t1 = tcg_temp_new();
4870 
4871     gen_icount_io_start(ctx);
4872     if (ctx->opcode & 0x00010000) {
4873         /* L=1 form only updates EE and RI */
4874         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4875     } else {
4876         /* mtmsr does not alter S, ME, or LE */
4877         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4878 
4879         /*
4880          * XXX: we need to update nip before the store if we enter
4881          *      power saving mode, we will exit the loop directly from
4882          *      ppc_store_msr
4883          */
4884         gen_update_nip(ctx, ctx->base.pc_next);
4885     }
4886 
4887     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4888     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4889     tcg_gen_or_tl(t0, t0, t1);
4890 
4891     gen_helper_store_msr(cpu_env, t0);
4892 
4893     /* Must stop the translation as machine state (may have) changed */
4894     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4895 
4896     tcg_temp_free(t0);
4897     tcg_temp_free(t1);
4898 #endif
4899 }
4900 
4901 /* mtspr */
4902 static void gen_mtspr(DisasContext *ctx)
4903 {
4904     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4905     uint32_t sprn = SPR(ctx->opcode);
4906 
4907 #if defined(CONFIG_USER_ONLY)
4908     write_cb = ctx->spr_cb[sprn].uea_write;
4909 #else
4910     if (ctx->pr) {
4911         write_cb = ctx->spr_cb[sprn].uea_write;
4912     } else if (ctx->hv) {
4913         write_cb = ctx->spr_cb[sprn].hea_write;
4914     } else {
4915         write_cb = ctx->spr_cb[sprn].oea_write;
4916     }
4917 #endif
4918     if (likely(write_cb != NULL)) {
4919         if (likely(write_cb != SPR_NOACCESS)) {
4920             (*write_cb)(ctx, sprn, rS(ctx->opcode));
4921         } else {
4922             /* Privilege exception */
4923             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4924                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4925                           ctx->cia);
4926             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4927         }
4928     } else {
4929         /* ISA 2.07 defines these as no-ops */
4930         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4931             (sprn >= 808 && sprn <= 811)) {
4932             /* This is a nop */
4933             return;
4934         }
4935 
4936         /* Not defined */
4937         qemu_log_mask(LOG_GUEST_ERROR,
4938                       "Trying to write invalid spr %d (0x%03x) at "
4939                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4941 
4942         /*
4943          * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4944          * generate a privileged exception, an hv emulation assist, or a no-op
4945          */
4946         if (sprn & 0x10) {
4947             if (ctx->pr) {
4948                 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4949             }
4950         } else {
4951             if (ctx->pr || sprn == 0) {
4952                 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR);
4953             }
4954         }
4955     }
4956 }
4957 
4958 #if defined(TARGET_PPC64)
4959 /* setb */
4960 static void gen_setb(DisasContext *ctx)
4961 {
4962     TCGv_i32 t0 = tcg_temp_new_i32();
4963     TCGv_i32 t8 = tcg_constant_i32(8);
4964     TCGv_i32 tm1 = tcg_constant_i32(-1);
4965     int crf = crfS(ctx->opcode);
4966 
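         /*
          * The CR field value is LT:GT:EQ:SO: a value >= 8 means LT is set
          * (result -1); otherwise a value >= 4 means GT is set (result 1);
          * else the result is 0.
          */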
4967     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4968     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4969     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4970 
4971     tcg_temp_free_i32(t0);
4972 }
4973 #endif
4974 
4975 /***                         Cache management                              ***/
4976 
4977 /* dcbf */
4978 static void gen_dcbf(DisasContext *ctx)
4979 {
4980     /* XXX: specification says this is treated as a load by the MMU */
4981     TCGv t0;
4982     gen_set_access_type(ctx, ACCESS_CACHE);
4983     t0 = tcg_temp_new();
4984     gen_addr_reg_index(ctx, t0);
4985     gen_qemu_ld8u(ctx, t0, t0);
4986     tcg_temp_free(t0);
4987 }
4988 
4989 /* dcbfep (external PID dcbf) */
4990 static void gen_dcbfep(DisasContext *ctx)
4991 {
4992     /* XXX: specification says this is treated as a load by the MMU */
4993     TCGv t0;
4994     CHK_SV;
4995     gen_set_access_type(ctx, ACCESS_CACHE);
4996     t0 = tcg_temp_new();
4997     gen_addr_reg_index(ctx, t0);
4998     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4999     tcg_temp_free(t0);
5000 }
5001 
5002 /* dcbi (Supervisor only) */
5003 static void gen_dcbi(DisasContext *ctx)
5004 {
5005 #if defined(CONFIG_USER_ONLY)
5006     GEN_PRIV;
5007 #else
5008     TCGv EA, val;
5009 
5010     CHK_SV;
5011     EA = tcg_temp_new();
5012     gen_set_access_type(ctx, ACCESS_CACHE);
5013     gen_addr_reg_index(ctx, EA);
5014     val = tcg_temp_new();
5015     /* XXX: specification says this should be treated as a store by the MMU */
5016     gen_qemu_ld8u(ctx, val, EA);
5017     gen_qemu_st8(ctx, val, EA);
5018     tcg_temp_free(val);
5019     tcg_temp_free(EA);
5020 #endif /* defined(CONFIG_USER_ONLY) */
5021 }
5022 
5023 /* dcbst */
5024 static void gen_dcbst(DisasContext *ctx)
5025 {
5026     /* XXX: specification says this is treated as a load by the MMU */
5027     TCGv t0;
5028     gen_set_access_type(ctx, ACCESS_CACHE);
5029     t0 = tcg_temp_new();
5030     gen_addr_reg_index(ctx, t0);
5031     gen_qemu_ld8u(ctx, t0, t0);
5032     tcg_temp_free(t0);
5033 }
5034 
5035 /* dcbstep (external PID dcbst) */
5036 static void gen_dcbstep(DisasContext *ctx)
5037 {
5038     /* XXX: specification says this is treated as a load by the MMU */
5039     TCGv t0;
5040     gen_set_access_type(ctx, ACCESS_CACHE);
5041     t0 = tcg_temp_new();
5042     gen_addr_reg_index(ctx, t0);
5043     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5044     tcg_temp_free(t0);
5045 }
5046 
5047 /* dcbt */
5048 static void gen_dcbt(DisasContext *ctx)
5049 {
5050     /*
5051      * interpreted as no-op
5052      * XXX: specification says this is treated as a load by the MMU but
5053      *      does not generate any exception
5054      */
5055 }
5056 
5057 /* dcbtep */
5058 static void gen_dcbtep(DisasContext *ctx)
5059 {
5060     /*
5061      * interpreted as no-op
5062      * XXX: specification says this is treated as a load by the MMU but
5063      *      does not generate any exception
5064      */
5065 }
5066 
5067 /* dcbtst */
5068 static void gen_dcbtst(DisasContext *ctx)
5069 {
5070     /*
5071      * interpreted as no-op
5072      * XXX: specification says this is treated as a load by the MMU but
5073      *      does not generate any exception
5074      */
5075 }
5076 
5077 /* dcbtstep */
5078 static void gen_dcbtstep(DisasContext *ctx)
5079 {
5080     /*
5081      * interpreted as no-op
5082      * XXX: specification says this is treated as a load by the MMU but
5083      *      does not generate any exception
5084      */
5085 }
5086 
5087 /* dcbtls */
5088 static void gen_dcbtls(DisasContext *ctx)
5089 {
5090     /* Always fails locking the cache */
5091     TCGv t0 = tcg_temp_new();
5092     gen_load_spr(t0, SPR_Exxx_L1CSR0);
5093     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5094     gen_store_spr(SPR_Exxx_L1CSR0, t0);
5095     tcg_temp_free(t0);
5096 }
5097 
5098 /* dcbz */
5099 static void gen_dcbz(DisasContext *ctx)
5100 {
5101     TCGv tcgv_addr;
5102     TCGv_i32 tcgv_op;
5103 
5104     gen_set_access_type(ctx, ACCESS_CACHE);
5105     tcgv_addr = tcg_temp_new();
5106     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5107     gen_addr_reg_index(ctx, tcgv_addr);
5108     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5109     tcg_temp_free(tcgv_addr);
5110     tcg_temp_free_i32(tcgv_op);
5111 }
5112 
5113 /* dcbzep */
5114 static void gen_dcbzep(DisasContext *ctx)
5115 {
5116     TCGv tcgv_addr;
5117     TCGv_i32 tcgv_op;
5118 
5119     gen_set_access_type(ctx, ACCESS_CACHE);
5120     tcgv_addr = tcg_temp_new();
5121     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5122     gen_addr_reg_index(ctx, tcgv_addr);
5123     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5124     tcg_temp_free(tcgv_addr);
5125     tcg_temp_free_i32(tcgv_op);
5126 }
5127 
5128 /* dst / dstt */
5129 static void gen_dst(DisasContext *ctx)
5130 {
5131     if (rA(ctx->opcode) == 0) {
5132         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5133     } else {
5134         /* interpreted as no-op */
5135     }
5136 }
5137 
5138 /* dstst / dststt */
5139 static void gen_dstst(DisasContext *ctx)
5140 {
5141     if (rA(ctx->opcode) == 0) {
5142         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5143     } else {
5144         /* interpreted as no-op */
5145     }
5147 }
5148 
5149 /* dss / dssall */
5150 static void gen_dss(DisasContext *ctx)
5151 {
5152     /* interpreted as no-op */
5153 }
5154 
5155 /* icbi */
5156 static void gen_icbi(DisasContext *ctx)
5157 {
5158     TCGv t0;
5159     gen_set_access_type(ctx, ACCESS_CACHE);
5160     t0 = tcg_temp_new();
5161     gen_addr_reg_index(ctx, t0);
5162     gen_helper_icbi(cpu_env, t0);
5163     tcg_temp_free(t0);
5164 }
5165 
5166 /* icbiep */
5167 static void gen_icbiep(DisasContext *ctx)
5168 {
5169     TCGv t0;
5170     gen_set_access_type(ctx, ACCESS_CACHE);
5171     t0 = tcg_temp_new();
5172     gen_addr_reg_index(ctx, t0);
5173     gen_helper_icbiep(cpu_env, t0);
5174     tcg_temp_free(t0);
5175 }
5176 
5177 /* Optional: */
5178 /* dcba */
5179 static void gen_dcba(DisasContext *ctx)
5180 {
5181     /*
5182      * interpreted as no-op
5183      * XXX: specification says this is treated as a store by the MMU
5184      *      but does not generate any exception
5185      */
5186 }
5187 
5188 /***                    Segment register manipulation                      ***/
5189 /* Supervisor only: */
5190 
5191 /* mfsr */
5192 static void gen_mfsr(DisasContext *ctx)
5193 {
5194 #if defined(CONFIG_USER_ONLY)
5195     GEN_PRIV;
5196 #else
5197     TCGv t0;
5198 
5199     CHK_SV;
5200     t0 = tcg_const_tl(SR(ctx->opcode));
5201     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5202     tcg_temp_free(t0);
5203 #endif /* defined(CONFIG_USER_ONLY) */
5204 }
5205 
5206 /* mfsrin */
5207 static void gen_mfsrin(DisasContext *ctx)
5208 {
5209 #if defined(CONFIG_USER_ONLY)
5210     GEN_PRIV;
5211 #else
5212     TCGv t0;
5213 
5214     CHK_SV;
5215     t0 = tcg_temp_new();
5216     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5217     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5218     tcg_temp_free(t0);
5219 #endif /* defined(CONFIG_USER_ONLY) */
5220 }
5221 
5222 /* mtsr */
5223 static void gen_mtsr(DisasContext *ctx)
5224 {
5225 #if defined(CONFIG_USER_ONLY)
5226     GEN_PRIV;
5227 #else
5228     TCGv t0;
5229 
5230     CHK_SV;
5231     t0 = tcg_const_tl(SR(ctx->opcode));
5232     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5233     tcg_temp_free(t0);
5234 #endif /* defined(CONFIG_USER_ONLY) */
5235 }
5236 
5237 /* mtsrin */
5238 static void gen_mtsrin(DisasContext *ctx)
5239 {
5240 #if defined(CONFIG_USER_ONLY)
5241     GEN_PRIV;
5242 #else
5243     TCGv t0;
5244     CHK_SV;
5245 
5246     t0 = tcg_temp_new();
5247     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5248     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5249     tcg_temp_free(t0);
5250 #endif /* defined(CONFIG_USER_ONLY) */
5251 }
5252 
5253 #if defined(TARGET_PPC64)
5254 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5255 
5256 /* mfsr */
5257 static void gen_mfsr_64b(DisasContext *ctx)
5258 {
5259 #if defined(CONFIG_USER_ONLY)
5260     GEN_PRIV;
5261 #else
5262     TCGv t0;
5263 
5264     CHK_SV;
5265     t0 = tcg_const_tl(SR(ctx->opcode));
5266     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5267     tcg_temp_free(t0);
5268 #endif /* defined(CONFIG_USER_ONLY) */
5269 }
5270 
5271 /* mfsrin */
5272 static void gen_mfsrin_64b(DisasContext *ctx)
5273 {
5274 #if defined(CONFIG_USER_ONLY)
5275     GEN_PRIV;
5276 #else
5277     TCGv t0;
5278 
5279     CHK_SV;
5280     t0 = tcg_temp_new();
5281     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5282     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5283     tcg_temp_free(t0);
5284 #endif /* defined(CONFIG_USER_ONLY) */
5285 }
5286 
5287 /* mtsr */
5288 static void gen_mtsr_64b(DisasContext *ctx)
5289 {
5290 #if defined(CONFIG_USER_ONLY)
5291     GEN_PRIV;
5292 #else
5293     TCGv t0;
5294 
5295     CHK_SV;
5296     t0 = tcg_const_tl(SR(ctx->opcode));
5297     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5298     tcg_temp_free(t0);
5299 #endif /* defined(CONFIG_USER_ONLY) */
5300 }
5301 
5302 /* mtsrin */
5303 static void gen_mtsrin_64b(DisasContext *ctx)
5304 {
5305 #if defined(CONFIG_USER_ONLY)
5306     GEN_PRIV;
5307 #else
5308     TCGv t0;
5309 
5310     CHK_SV;
5311     t0 = tcg_temp_new();
5312     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5313     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5314     tcg_temp_free(t0);
5315 #endif /* defined(CONFIG_USER_ONLY) */
5316 }
5317 
5318 /* slbmte */
5319 static void gen_slbmte(DisasContext *ctx)
5320 {
5321 #if defined(CONFIG_USER_ONLY)
5322     GEN_PRIV;
5323 #else
5324     CHK_SV;
5325 
5326     gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)],
5327                          cpu_gpr[rS(ctx->opcode)]);
5328 #endif /* defined(CONFIG_USER_ONLY) */
5329 }
5330 
5331 static void gen_slbmfee(DisasContext *ctx)
5332 {
5333 #if defined(CONFIG_USER_ONLY)
5334     GEN_PRIV;
5335 #else
5336     CHK_SV;
5337 
5338     gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5339                              cpu_gpr[rB(ctx->opcode)]);
5340 #endif /* defined(CONFIG_USER_ONLY) */
5341 }
5342 
5343 static void gen_slbmfev(DisasContext *ctx)
5344 {
5345 #if defined(CONFIG_USER_ONLY)
5346     GEN_PRIV;
5347 #else
5348     CHK_SV;
5349 
5350     gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5351                              cpu_gpr[rB(ctx->opcode)]);
5352 #endif /* defined(CONFIG_USER_ONLY) */
5353 }
5354 
5355 static void gen_slbfee_(DisasContext *ctx)
5356 {
5357 #if defined(CONFIG_USER_ONLY)
5358     gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5359 #else
5360     TCGLabel *l1, *l2;
5361 
5362     if (unlikely(ctx->pr)) {
5363         gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
5364         return;
5365     }
5366     gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env,
5367                              cpu_gpr[rB(ctx->opcode)]);
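         /*
          * Set CR0 to SO, plus EQ if the SLB search succeeded; the helper
          * returns -1 in rS on a miss, in which case rS is cleared.
          */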
5368     l1 = gen_new_label();
5369     l2 = gen_new_label();
5370     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5371     tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1);
5372     tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
5373     tcg_gen_br(l2);
5374     gen_set_label(l1);
5375     tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0);
5376     gen_set_label(l2);
5377 #endif
5378 }
5379 #endif /* defined(TARGET_PPC64) */
5380 
5381 /***                      Lookaside buffer management                      ***/
5382 /* Optional & supervisor only: */
5383 
5384 /* tlbia */
5385 static void gen_tlbia(DisasContext *ctx)
5386 {
5387 #if defined(CONFIG_USER_ONLY)
5388     GEN_PRIV;
5389 #else
5390     CHK_HV;
5391 
5392     gen_helper_tlbia(cpu_env);
5393 #endif  /* defined(CONFIG_USER_ONLY) */
5394 }
5395 
5396 /* tlbiel */
5397 static void gen_tlbiel(DisasContext *ctx)
5398 {
5399 #if defined(CONFIG_USER_ONLY)
5400     GEN_PRIV;
5401 #else
5402     bool psr = (ctx->opcode >> 17) & 0x1;
5403 
5404     if (ctx->pr || (!ctx->hv && !psr && ctx->hr)) {
5405         /*
5406          * tlbiel is privileged except when PSR=0 and HR=1, making it
5407          * hypervisor privileged.
5408          */
5409         GEN_PRIV;
5410     }
5411 
5412     gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5413 #endif /* defined(CONFIG_USER_ONLY) */
5414 }
5415 
5416 /* tlbie */
5417 static void gen_tlbie(DisasContext *ctx)
5418 {
5419 #if defined(CONFIG_USER_ONLY)
5420     GEN_PRIV;
5421 #else
5422     bool psr = (ctx->opcode >> 17) & 0x1;
5423     TCGv_i32 t1;
5424 
5425     if (ctx->pr) {
5426         /* tlbie is privileged... */
5427         GEN_PRIV;
5428     } else if (!ctx->hv) {
5429         if (!ctx->gtse || (!psr && ctx->hr)) {
5430             /*
5431              * ... except when GTSE=0 or when PSR=0 and HR=1, making it
5432              * hypervisor privileged.
5433              */
5434             GEN_PRIV;
5435         }
5436     }
5437 
5438     if (NARROW_MODE(ctx)) {
5439         TCGv t0 = tcg_temp_new();
5440         tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]);
5441         gen_helper_tlbie(cpu_env, t0);
5442         tcg_temp_free(t0);
5443     } else {
5444         gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5445     }
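         /*
          * Flag that a global TLB flush is pending; it will be performed
          * by the next tlbsync (see gen_check_tlb_flush).
          */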
5446     t1 = tcg_temp_new_i32();
5447     tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5448     tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH);
5449     tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
5450     tcg_temp_free_i32(t1);
5451 #endif /* defined(CONFIG_USER_ONLY) */
5452 }
5453 
5454 /* tlbsync */
5455 static void gen_tlbsync(DisasContext *ctx)
5456 {
5457 #if defined(CONFIG_USER_ONLY)
5458     GEN_PRIV;
5459 #else
5460 
5461     if (ctx->gtse) {
5462         CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */
5463     } else {
5464         CHK_HV; /* Else hypervisor privileged */
5465     }
5466 
5467     /* BookS does both ptesync and tlbsync, so tlbsync is a nop for server */
5468     if (ctx->insns_flags & PPC_BOOKE) {
5469         gen_check_tlb_flush(ctx, true);
5470     }
5471 #endif /* defined(CONFIG_USER_ONLY) */
5472 }
5473 
5474 #if defined(TARGET_PPC64)
5475 /* slbia */
5476 static void gen_slbia(DisasContext *ctx)
5477 {
5478 #if defined(CONFIG_USER_ONLY)
5479     GEN_PRIV;
5480 #else
5481     uint32_t ih = (ctx->opcode >> 21) & 0x7;
5482     TCGv_i32 t0 = tcg_const_i32(ih);
5483 
5484     CHK_SV;
5485 
5486     gen_helper_slbia(cpu_env, t0);
5487     tcg_temp_free_i32(t0);
5488 #endif /* defined(CONFIG_USER_ONLY) */
5489 }
5490 
5491 /* slbie */
5492 static void gen_slbie(DisasContext *ctx)
5493 {
5494 #if defined(CONFIG_USER_ONLY)
5495     GEN_PRIV;
5496 #else
5497     CHK_SV;
5498 
5499     gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5500 #endif /* defined(CONFIG_USER_ONLY) */
5501 }
5502 
5503 /* slbieg */
5504 static void gen_slbieg(DisasContext *ctx)
5505 {
5506 #if defined(CONFIG_USER_ONLY)
5507     GEN_PRIV;
5508 #else
5509     CHK_SV;
5510 
5511     gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5512 #endif /* defined(CONFIG_USER_ONLY) */
5513 }
5514 
5515 /* slbsync */
5516 static void gen_slbsync(DisasContext *ctx)
5517 {
5518 #if defined(CONFIG_USER_ONLY)
5519     GEN_PRIV;
5520 #else
5521     CHK_SV;
5522     gen_check_tlb_flush(ctx, true);
5523 #endif /* defined(CONFIG_USER_ONLY) */
5524 }
5525 
5526 #endif  /* defined(TARGET_PPC64) */
5527 
5528 /***                              External control                         ***/
5529 /* Optional: */
5530 
5531 /* eciwx */
5532 static void gen_eciwx(DisasContext *ctx)
5533 {
5534     TCGv t0;
5535     /* Should check EAR[E] ! */
5536     gen_set_access_type(ctx, ACCESS_EXT);
5537     t0 = tcg_temp_new();
5538     gen_addr_reg_index(ctx, t0);
5539     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5540                        DEF_MEMOP(MO_UL | MO_ALIGN));
5541     tcg_temp_free(t0);
5542 }
5543 
5544 /* ecowx */
5545 static void gen_ecowx(DisasContext *ctx)
5546 {
5547     TCGv t0;
5548     /* Should check EAR[E] ! */
5549     gen_set_access_type(ctx, ACCESS_EXT);
5550     t0 = tcg_temp_new();
5551     gen_addr_reg_index(ctx, t0);
5552     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5553                        DEF_MEMOP(MO_UL | MO_ALIGN));
5554     tcg_temp_free(t0);
5555 }
5556 
5557 /* 602 - 603 - G2 TLB management */
5558 
5559 /* tlbld */
5560 static void gen_tlbld_6xx(DisasContext *ctx)
5561 {
5562 #if defined(CONFIG_USER_ONLY)
5563     GEN_PRIV;
5564 #else
5565     CHK_SV;
5566     gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5567 #endif /* defined(CONFIG_USER_ONLY) */
5568 }
5569 
5570 /* tlbli */
5571 static void gen_tlbli_6xx(DisasContext *ctx)
5572 {
5573 #if defined(CONFIG_USER_ONLY)
5574     GEN_PRIV;
5575 #else
5576     CHK_SV;
5577     gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5578 #endif /* defined(CONFIG_USER_ONLY) */
5579 }
5580 
5581 /* BookE specific instructions */
5582 
5583 /* XXX: not implemented on 440 ? */
5584 static void gen_mfapidi(DisasContext *ctx)
5585 {
5586     /* XXX: TODO */
5587     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5588 }
5589 
5590 /* XXX: not implemented on 440 ? */
5591 static void gen_tlbiva(DisasContext *ctx)
5592 {
5593 #if defined(CONFIG_USER_ONLY)
5594     GEN_PRIV;
5595 #else
5596     TCGv t0;
5597 
5598     CHK_SV;
5599     t0 = tcg_temp_new();
5600     gen_addr_reg_index(ctx, t0);
5601     gen_helper_tlbiva(cpu_env, t0);
5602     tcg_temp_free(t0);
5603 #endif /* defined(CONFIG_USER_ONLY) */
5604 }
5605 
5606 /* All 405 MAC instructions are translated here */
5607 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5608                                         int ra, int rb, int rt, int Rc)
5609 {
5610     TCGv t0, t1;
5611 
5612     t0 = tcg_temp_local_new();
5613     t1 = tcg_temp_local_new();
5614 
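         /*
          * The low bits of opc3 select which 16-bit halves of rA and rB
          * are multiplied and whether they are sign- or zero-extended.
          */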
5615     switch (opc3 & 0x0D) {
5616     case 0x05:
5617         /* macchw    - macchw.    - macchwo   - macchwo.   */
5618         /* macchws   - macchws.   - macchwso  - macchwso.  */
5619         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5620         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5621         /* mulchw - mulchw. */
5622         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5623         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5624         tcg_gen_ext16s_tl(t1, t1);
5625         break;
5626     case 0x04:
5627         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5628         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5629         /* mulchwu - mulchwu. */
5630         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5631         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5632         tcg_gen_ext16u_tl(t1, t1);
5633         break;
5634     case 0x01:
5635         /* machhw    - machhw.    - machhwo   - machhwo.   */
5636         /* machhws   - machhws.   - machhwso  - machhwso.  */
5637         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5638         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5639         /* mulhhw - mulhhw. */
5640         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5641         tcg_gen_ext16s_tl(t0, t0);
5642         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5643         tcg_gen_ext16s_tl(t1, t1);
5644         break;
5645     case 0x00:
5646         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5647         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5648         /* mulhhwu - mulhhwu. */
5649         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5650         tcg_gen_ext16u_tl(t0, t0);
5651         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5652         tcg_gen_ext16u_tl(t1, t1);
5653         break;
5654     case 0x0D:
5655         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5656         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5657         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5658         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5659         /* mullhw - mullhw. */
5660         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5661         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5662         break;
5663     case 0x0C:
5664         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5665         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5666         /* mullhwu - mullhwu. */
5667         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5668         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5669         break;
5670     }
5671     if (opc2 & 0x04) {
5672         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5673         tcg_gen_mul_tl(t1, t0, t1);
5674         if (opc2 & 0x02) {
5675             /* nmultiply-and-accumulate (0x0E) */
5676             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5677         } else {
5678             /* multiply-and-accumulate (0x0C) */
5679             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5680         }
5681 
5682         if (opc3 & 0x12) {
5683             /* Check overflow and/or saturate */
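                 /*
                  * opc3 bit 0x10 requests overflow detection (XER OV/SO),
                  * bit 0x02 requests saturation of the result.
                  */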
5684             TCGLabel *l1 = gen_new_label();
5685 
5686             if (opc3 & 0x10) {
5687                 /* Start with XER OV disabled, the most likely case */
5688                 tcg_gen_movi_tl(cpu_ov, 0);
5689             }
5690             if (opc3 & 0x01) {
5691                 /* Signed */
5692                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5693                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5694                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5695                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5696                 if (opc3 & 0x02) {
5697                     /* Saturate */
5698                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5699                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5700                 }
5701             } else {
5702                 /* Unsigned */
5703                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5704                 if (opc3 & 0x02) {
5705                     /* Saturate */
5706                     tcg_gen_movi_tl(t0, UINT32_MAX);
5707                 }
5708             }
5709             if (opc3 & 0x10) {
5710                 /* Check overflow */
5711                 tcg_gen_movi_tl(cpu_ov, 1);
5712                 tcg_gen_movi_tl(cpu_so, 1);
5713             }
5714             gen_set_label(l1);
5715             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5716         }
5717     } else {
5718         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5719     }
5720     tcg_temp_free(t0);
5721     tcg_temp_free(t1);
5722     if (unlikely(Rc != 0)) {
5723         /* Update Rc0 */
5724         gen_set_Rc0(ctx, cpu_gpr[rt]);
5725     }
5726 }
5727 
5728 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
5729 static void glue(gen_, name)(DisasContext *ctx)                               \
5730 {                                                                             \
5731     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
5732                          rD(ctx->opcode), Rc(ctx->opcode));                   \
5733 }
5734 
5735 /* macchw    - macchw.    */
5736 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5737 /* macchwo   - macchwo.   */
5738 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5739 /* macchws   - macchws.   */
5740 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5741 /* macchwso  - macchwso.  */
5742 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5743 /* macchwsu  - macchwsu.  */
5744 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5745 /* macchwsuo - macchwsuo. */
5746 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5747 /* macchwu   - macchwu.   */
5748 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5749 /* macchwuo  - macchwuo.  */
5750 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5751 /* machhw    - machhw.    */
5752 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5753 /* machhwo   - machhwo.   */
5754 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5755 /* machhws   - machhws.   */
5756 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5757 /* machhwso  - machhwso.  */
5758 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5759 /* machhwsu  - machhwsu.  */
5760 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5761 /* machhwsuo - machhwsuo. */
5762 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5763 /* machhwu   - machhwu.   */
5764 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5765 /* machhwuo  - machhwuo.  */
5766 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5767 /* maclhw    - maclhw.    */
5768 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5769 /* maclhwo   - maclhwo.   */
5770 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5771 /* maclhws   - maclhws.   */
5772 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5773 /* maclhwso  - maclhwso.  */
5774 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5775 /* maclhwu   - maclhwu.   */
5776 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5777 /* maclhwuo  - maclhwuo.  */
5778 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5779 /* maclhwsu  - maclhwsu.  */
5780 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5781 /* maclhwsuo - maclhwsuo. */
5782 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5783 /* nmacchw   - nmacchw.   */
5784 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5785 /* nmacchwo  - nmacchwo.  */
5786 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5787 /* nmacchws  - nmacchws.  */
5788 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5789 /* nmacchwso - nmacchwso. */
5790 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5791 /* nmachhw   - nmachhw.   */
5792 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5793 /* nmachhwo  - nmachhwo.  */
5794 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5795 /* nmachhws  - nmachhws.  */
5796 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5797 /* nmachhwso - nmachhwso. */
5798 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5799 /* nmaclhw   - nmaclhw.   */
5800 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5801 /* nmaclhwo  - nmaclhwo.  */
5802 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5803 /* nmaclhws  - nmaclhws.  */
5804 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5805 /* nmaclhwso - nmaclhwso. */
5806 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5807 
5808 /* mulchw  - mulchw.  */
5809 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5810 /* mulchwu - mulchwu. */
5811 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5812 /* mulhhw  - mulhhw.  */
5813 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5814 /* mulhhwu - mulhhwu. */
5815 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5816 /* mullhw  - mullhw.  */
5817 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5818 /* mullhwu - mullhwu. */
5819 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5820 
5821 /* mfdcr */
5822 static void gen_mfdcr(DisasContext *ctx)
5823 {
5824 #if defined(CONFIG_USER_ONLY)
5825     GEN_PRIV;
5826 #else
5827     TCGv dcrn;
5828 
5829     CHK_SV;
5830     dcrn = tcg_const_tl(SPR(ctx->opcode));
5831     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5832     tcg_temp_free(dcrn);
5833 #endif /* defined(CONFIG_USER_ONLY) */
5834 }
5835 
5836 /* mtdcr */
5837 static void gen_mtdcr(DisasContext *ctx)
5838 {
5839 #if defined(CONFIG_USER_ONLY)
5840     GEN_PRIV;
5841 #else
5842     TCGv dcrn;
5843 
5844     CHK_SV;
5845     dcrn = tcg_const_tl(SPR(ctx->opcode));
5846     gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5847     tcg_temp_free(dcrn);
5848 #endif /* defined(CONFIG_USER_ONLY) */
5849 }
5850 
5851 /* mfdcrx */
5852 /* XXX: not implemented on 440 ? */
5853 static void gen_mfdcrx(DisasContext *ctx)
5854 {
5855 #if defined(CONFIG_USER_ONLY)
5856     GEN_PRIV;
5857 #else
5858     CHK_SV;
5859     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5860                         cpu_gpr[rA(ctx->opcode)]);
5861     /* Note: Rc update flag set leads to undefined state of Rc0 */
5862 #endif /* defined(CONFIG_USER_ONLY) */
5863 }
5864 
5865 /* mtdcrx */
5866 /* XXX: not implemented on 440 ? */
5867 static void gen_mtdcrx(DisasContext *ctx)
5868 {
5869 #if defined(CONFIG_USER_ONLY)
5870     GEN_PRIV;
5871 #else
5872     CHK_SV;
5873     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5874                          cpu_gpr[rS(ctx->opcode)]);
5875     /* Note: Rc update flag set leads to undefined state of Rc0 */
5876 #endif /* defined(CONFIG_USER_ONLY) */
5877 }
5878 
5879 /* mfdcrux (PPC 460) : user-mode access to DCR */
5880 static void gen_mfdcrux(DisasContext *ctx)
5881 {
5882     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5883                         cpu_gpr[rA(ctx->opcode)]);
5884     /* Note: Rc update flag set leads to undefined state of Rc0 */
5885 }
5886 
5887 /* mtdcrux (PPC 460) : user-mode access to DCR */
5888 static void gen_mtdcrux(DisasContext *ctx)
5889 {
5890     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5891                          cpu_gpr[rS(ctx->opcode)]);
5892     /* Note: Rc update flag set leads to undefined state of Rc0 */
5893 }
5894 
5895 /* dccci */
5896 static void gen_dccci(DisasContext *ctx)
5897 {
5898     CHK_SV;
5899     /* interpreted as no-op */
5900 }
5901 
5902 /* dcread */
5903 static void gen_dcread(DisasContext *ctx)
5904 {
5905 #if defined(CONFIG_USER_ONLY)
5906     GEN_PRIV;
5907 #else
5908     TCGv EA, val;
5909 
5910     CHK_SV;
5911     gen_set_access_type(ctx, ACCESS_CACHE);
5912     EA = tcg_temp_new();
5913     gen_addr_reg_index(ctx, EA);
5914     val = tcg_temp_new();
5915     gen_qemu_ld32u(ctx, val, EA);
5916     tcg_temp_free(val);
5917     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5918     tcg_temp_free(EA);
5919 #endif /* defined(CONFIG_USER_ONLY) */
5920 }
5921 
5922 /* icbt */
5923 static void gen_icbt_40x(DisasContext *ctx)
5924 {
5925     /*
5926      * interpreted as no-op
5927      * XXX: specification says this is treated as a load by the MMU but
5928      *      does not generate any exception
5929      */
5930 }
5931 
5932 /* iccci */
5933 static void gen_iccci(DisasContext *ctx)
5934 {
5935     CHK_SV;
5936     /* interpreted as no-op */
5937 }
5938 
5939 /* icread */
5940 static void gen_icread(DisasContext *ctx)
5941 {
5942     CHK_SV;
5943     /* interpreted as no-op */
5944 }
5945 
5946 /* rfci (supervisor only) */
5947 static void gen_rfci_40x(DisasContext *ctx)
5948 {
5949 #if defined(CONFIG_USER_ONLY)
5950     GEN_PRIV;
5951 #else
5952     CHK_SV;
5953     /* Restore CPU state */
5954     gen_helper_40x_rfci(cpu_env);
5955     ctx->base.is_jmp = DISAS_EXIT;
5956 #endif /* defined(CONFIG_USER_ONLY) */
5957 }
5958 
5959 static void gen_rfci(DisasContext *ctx)
5960 {
5961 #if defined(CONFIG_USER_ONLY)
5962     GEN_PRIV;
5963 #else
5964     CHK_SV;
5965     /* Restore CPU state */
5966     gen_helper_rfci(cpu_env);
5967     ctx->base.is_jmp = DISAS_EXIT;
5968 #endif /* defined(CONFIG_USER_ONLY) */
5969 }
5970 
5971 /* BookE specific */
5972 
5973 /* XXX: not implemented on 440 ? */
5974 static void gen_rfdi(DisasContext *ctx)
5975 {
5976 #if defined(CONFIG_USER_ONLY)
5977     GEN_PRIV;
5978 #else
5979     CHK_SV;
5980     /* Restore CPU state */
5981     gen_helper_rfdi(cpu_env);
5982     ctx->base.is_jmp = DISAS_EXIT;
5983 #endif /* defined(CONFIG_USER_ONLY) */
5984 }
5985 
5986 /* XXX: not implemented on 440 ? */
5987 static void gen_rfmci(DisasContext *ctx)
5988 {
5989 #if defined(CONFIG_USER_ONLY)
5990     GEN_PRIV;
5991 #else
5992     CHK_SV;
5993     /* Restore CPU state */
5994     gen_helper_rfmci(cpu_env);
5995     ctx->base.is_jmp = DISAS_EXIT;
5996 #endif /* defined(CONFIG_USER_ONLY) */
5997 }
5998 
5999 /* TLB management - PowerPC 405 implementation */
6000 
6001 /* tlbre */
6002 static void gen_tlbre_40x(DisasContext *ctx)
6003 {
6004 #if defined(CONFIG_USER_ONLY)
6005     GEN_PRIV;
6006 #else
6007     CHK_SV;
6008     switch (rB(ctx->opcode)) {
6009     case 0:
6010         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
6011                                 cpu_gpr[rA(ctx->opcode)]);
6012         break;
6013     case 1:
6014         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
6015                                 cpu_gpr[rA(ctx->opcode)]);
6016         break;
6017     default:
6018         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6019         break;
6020     }
6021 #endif /* defined(CONFIG_USER_ONLY) */
6022 }
6023 
6024 /* tlbsx - tlbsx. */
6025 static void gen_tlbsx_40x(DisasContext *ctx)
6026 {
6027 #if defined(CONFIG_USER_ONLY)
6028     GEN_PRIV;
6029 #else
6030     TCGv t0;
6031 
6032     CHK_SV;
6033     t0 = tcg_temp_new();
6034     gen_addr_reg_index(ctx, t0);
6035     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6036     tcg_temp_free(t0);
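         /*
          * Record form: CR0 = SO, with EQ set if the search found a
          * matching TLB entry (the helper returns -1 on a miss).
          */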
6037     if (Rc(ctx->opcode)) {
6038         TCGLabel *l1 = gen_new_label();
6039         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6040         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6041         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6042         gen_set_label(l1);
6043     }
6044 #endif /* defined(CONFIG_USER_ONLY) */
6045 }
6046 
6047 /* tlbwe */
6048 static void gen_tlbwe_40x(DisasContext *ctx)
6049 {
6050 #if defined(CONFIG_USER_ONLY)
6051     GEN_PRIV;
6052 #else
6053     CHK_SV;
6054 
6055     switch (rB(ctx->opcode)) {
6056     case 0:
6057         gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
6058                                 cpu_gpr[rS(ctx->opcode)]);
6059         break;
6060     case 1:
6061         gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
6062                                 cpu_gpr[rS(ctx->opcode)]);
6063         break;
6064     default:
6065         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6066         break;
6067     }
6068 #endif /* defined(CONFIG_USER_ONLY) */
6069 }
6070 
6071 /* TLB management - PowerPC 440 implementation */
6072 
6073 /* tlbre */
6074 static void gen_tlbre_440(DisasContext *ctx)
6075 {
6076 #if defined(CONFIG_USER_ONLY)
6077     GEN_PRIV;
6078 #else
6079     CHK_SV;
6080 
6081     switch (rB(ctx->opcode)) {
6082     case 0:
6083     case 1:
6084     case 2:
6085         {
6086             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6087             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6088                                  t0, cpu_gpr[rA(ctx->opcode)]);
6089             tcg_temp_free_i32(t0);
6090         }
6091         break;
6092     default:
6093         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6094         break;
6095     }
6096 #endif /* defined(CONFIG_USER_ONLY) */
6097 }
6098 
6099 /* tlbsx - tlbsx. */
6100 static void gen_tlbsx_440(DisasContext *ctx)
6101 {
6102 #if defined(CONFIG_USER_ONLY)
6103     GEN_PRIV;
6104 #else
6105     TCGv t0;
6106 
6107     CHK_SV;
6108     t0 = tcg_temp_new();
6109     gen_addr_reg_index(ctx, t0);
6110     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6111     tcg_temp_free(t0);
6112     if (Rc(ctx->opcode)) {
6113         TCGLabel *l1 = gen_new_label();
6114         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6115         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6116         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6117         gen_set_label(l1);
6118     }
6119 #endif /* defined(CONFIG_USER_ONLY) */
6120 }
6121 
6122 /* tlbwe */
6123 static void gen_tlbwe_440(DisasContext *ctx)
6124 {
6125 #if defined(CONFIG_USER_ONLY)
6126     GEN_PRIV;
6127 #else
6128     CHK_SV;
6129     switch (rB(ctx->opcode)) {
6130     case 0:
6131     case 1:
6132     case 2:
6133         {
6134             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6135             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6136                                  cpu_gpr[rS(ctx->opcode)]);
6137             tcg_temp_free_i32(t0);
6138         }
6139         break;
6140     default:
6141         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6142         break;
6143     }
6144 #endif /* defined(CONFIG_USER_ONLY) */
6145 }
6146 
6147 /* TLB management - PowerPC BookE 2.06 implementation */
6148 
6149 /* tlbre */
6150 static void gen_tlbre_booke206(DisasContext *ctx)
6151 {
6152 #if defined(CONFIG_USER_ONLY)
6153     GEN_PRIV;
6154 #else
6155     CHK_SV;
6156     gen_helper_booke206_tlbre(cpu_env);
6157 #endif /* defined(CONFIG_USER_ONLY) */
6158 }
6159 
6160 /* tlbsx - tlbsx. */
6161 static void gen_tlbsx_booke206(DisasContext *ctx)
6162 {
6163 #if defined(CONFIG_USER_ONLY)
6164     GEN_PRIV;
6165 #else
6166     TCGv t0;
6167 
6168     CHK_SV;
6169     if (rA(ctx->opcode)) {
6170         t0 = tcg_temp_new();
6171         tcg_gen_mov_tl(t0, cpu_gpr[rA(ctx->opcode)]);
6172     } else {
6173         t0 = tcg_const_tl(0);
6174     }
6175 
6176     tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6177     gen_helper_booke206_tlbsx(cpu_env, t0);
6178     tcg_temp_free(t0);
6179 #endif /* defined(CONFIG_USER_ONLY) */
6180 }
6181 
6182 /* tlbwe */
6183 static void gen_tlbwe_booke206(DisasContext *ctx)
6184 {
6185 #if defined(CONFIG_USER_ONLY)
6186     GEN_PRIV;
6187 #else
6188     CHK_SV;
6189     gen_helper_booke206_tlbwe(cpu_env);
6190 #endif /* defined(CONFIG_USER_ONLY) */
6191 }
6192 
6193 static void gen_tlbivax_booke206(DisasContext *ctx)
6194 {
6195 #if defined(CONFIG_USER_ONLY)
6196     GEN_PRIV;
6197 #else
6198     TCGv t0;
6199 
6200     CHK_SV;
6201     t0 = tcg_temp_new();
6202     gen_addr_reg_index(ctx, t0);
6203     gen_helper_booke206_tlbivax(cpu_env, t0);
6204     tcg_temp_free(t0);
6205 #endif /* defined(CONFIG_USER_ONLY) */
6206 }
6207 
6208 static void gen_tlbilx_booke206(DisasContext *ctx)
6209 {
6210 #if defined(CONFIG_USER_ONLY)
6211     GEN_PRIV;
6212 #else
6213     TCGv t0;
6214 
6215     CHK_SV;
6216     t0 = tcg_temp_new();
6217     gen_addr_reg_index(ctx, t0);
6218 
6219     switch ((ctx->opcode >> 21) & 0x3) {
6220     case 0:
6221         gen_helper_booke206_tlbilx0(cpu_env, t0);
6222         break;
6223     case 1:
6224         gen_helper_booke206_tlbilx1(cpu_env, t0);
6225         break;
6226     case 3:
6227         gen_helper_booke206_tlbilx3(cpu_env, t0);
6228         break;
6229     default:
6230         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6231         break;
6232     }
6233 
6234     tcg_temp_free(t0);
6235 #endif /* defined(CONFIG_USER_ONLY) */
6236 }
6238 
6239 /* wrtee */
6240 static void gen_wrtee(DisasContext *ctx)
6241 {
6242 #if defined(CONFIG_USER_ONLY)
6243     GEN_PRIV;
6244 #else
6245     TCGv t0;
6246 
6247     CHK_SV;
6248     t0 = tcg_temp_new();
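         /* Replace only MSR[EE] with the corresponding bit from rD. */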
6249     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6250     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6251     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6252     tcg_temp_free(t0);
6253     /*
6254      * Stop translation to have a chance to raise an exception if we
6255      * just set msr_ee to 1
6256      */
6257     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6258 #endif /* defined(CONFIG_USER_ONLY) */
6259 }
6260 
6261 /* wrteei */
6262 static void gen_wrteei(DisasContext *ctx)
6263 {
6264 #if defined(CONFIG_USER_ONLY)
6265     GEN_PRIV;
6266 #else
6267     CHK_SV;
6268     if (ctx->opcode & 0x00008000) {
6269         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6270         /* Stop translation to have a chance to raise an exception */
6271         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6272     } else {
6273         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6274     }
6275 #endif /* defined(CONFIG_USER_ONLY) */
6276 }
6277 
6278 /* PowerPC 440 specific instructions */
6279 
6280 /* dlmzb */
6281 static void gen_dlmzb(DisasContext *ctx)
6282 {
6283     TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6284     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6285                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6286     tcg_temp_free_i32(t0);
6287 }
6288 
6289 /* mbar replaces eieio on 440 */
6290 static void gen_mbar(DisasContext *ctx)
6291 {
6292     /* interpreted as no-op */
6293 }
6294 
6295 /* msync replaces sync on 440 */
6296 static void gen_msync_4xx(DisasContext *ctx)
6297 {
6298     /* Only e500 seems to treat reserved bits as invalid */
6299     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
6300         (ctx->opcode & 0x03FFF801)) {
6301         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6302     }
6303     /* otherwise interpreted as no-op */
6304 }
6305 
6306 /* icbt */
6307 static void gen_icbt_440(DisasContext *ctx)
6308 {
6309     /*
6310      * interpreted as no-op
6311      * XXX: specification says this is treated as a load by the MMU but
6312      *      does not generate any exception
6313      */
6314 }
6315 
6316 /* Embedded.Processor Control */
6317 
6318 static void gen_msgclr(DisasContext *ctx)
6319 {
6320 #if defined(CONFIG_USER_ONLY)
6321     GEN_PRIV;
6322 #else
6323     CHK_HV;
6324     if (is_book3s_arch2x(ctx)) {
6325         gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6326     } else {
6327         gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6328     }
6329 #endif /* defined(CONFIG_USER_ONLY) */
6330 }
6331 
6332 static void gen_msgsnd(DisasContext *ctx)
6333 {
6334 #if defined(CONFIG_USER_ONLY)
6335     GEN_PRIV;
6336 #else
6337     CHK_HV;
6338     if (is_book3s_arch2x(ctx)) {
6339         gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
6340     } else {
6341         gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
6342     }
6343 #endif /* defined(CONFIG_USER_ONLY) */
6344 }
6345 
6346 #if defined(TARGET_PPC64)
6347 static void gen_msgclrp(DisasContext *ctx)
6348 {
6349 #if defined(CONFIG_USER_ONLY)
6350     GEN_PRIV;
6351 #else
6352     CHK_SV;
6353     gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6354 #endif /* defined(CONFIG_USER_ONLY) */
6355 }
6356 
6357 static void gen_msgsndp(DisasContext *ctx)
6358 {
6359 #if defined(CONFIG_USER_ONLY)
6360     GEN_PRIV;
6361 #else
6362     CHK_SV;
6363     gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
6364 #endif /* defined(CONFIG_USER_ONLY) */
6365 }
6366 #endif
6367 
6368 static void gen_msgsync(DisasContext *ctx)
6369 {
6370 #if defined(CONFIG_USER_ONLY)
6371     GEN_PRIV;
6372 #else
6373     CHK_HV;
6374 #endif /* defined(CONFIG_USER_ONLY) */
6375     /* interpreted as no-op */
6376 }
6377 
6378 #if defined(TARGET_PPC64)
6379 static void gen_maddld(DisasContext *ctx)
6380 {
6381     TCGv_i64 t1 = tcg_temp_new_i64();
6382 
6383     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6384     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6385     tcg_temp_free_i64(t1);
6386 }
6387 
6388 /* maddhd maddhdu */
6389 static void gen_maddhd_maddhdu(DisasContext *ctx)
6390 {
6391     TCGv_i64 lo = tcg_temp_new_i64();
6392     TCGv_i64 hi = tcg_temp_new_i64();
6393     TCGv_i64 t1 = tcg_temp_new_i64();
6394 
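         /*
          * The lowest opcode bit (read via the Rc field) distinguishes
          * maddhdu (unsigned) from maddhd (signed).
          */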
6395     if (Rc(ctx->opcode)) {
6396         tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6397                           cpu_gpr[rB(ctx->opcode)]);
6398         tcg_gen_movi_i64(t1, 0);
6399     } else {
6400         tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6401                           cpu_gpr[rB(ctx->opcode)]);
6402         tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6403     }
6404     tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6405                      cpu_gpr[rC(ctx->opcode)], t1);
6406     tcg_temp_free_i64(lo);
6407     tcg_temp_free_i64(hi);
6408     tcg_temp_free_i64(t1);
6409 }
6410 #endif /* defined(TARGET_PPC64) */
6411 
6412 static void gen_tbegin(DisasContext *ctx)
6413 {
6414     if (unlikely(!ctx->tm_enabled)) {
6415         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6416         return;
6417     }
6418     gen_helper_tbegin(cpu_env);
6419 }
6420 
6421 #define GEN_TM_NOOP(name)                                      \
6422 static inline void gen_##name(DisasContext *ctx)               \
6423 {                                                              \
6424     if (unlikely(!ctx->tm_enabled)) {                          \
6425         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6426         return;                                                \
6427     }                                                          \
6428     /*                                                         \
6429      * Because tbegin always fails in QEMU, these user         \
6430      * space instructions all have a simple implementation:    \
6431      *                                                         \
6432      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
6433      *           = 0b0 || 0b00    || 0b0                       \
6434      */                                                        \
6435     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6436 }
6437 
6438 GEN_TM_NOOP(tend);
6439 GEN_TM_NOOP(tabort);
6440 GEN_TM_NOOP(tabortwc);
6441 GEN_TM_NOOP(tabortwci);
6442 GEN_TM_NOOP(tabortdc);
6443 GEN_TM_NOOP(tabortdci);
6444 GEN_TM_NOOP(tsr);
6445 
6446 static inline void gen_cp_abort(DisasContext *ctx)
6447 {
6448     /* Do Nothing */
6449 }
6450 
6451 #define GEN_CP_PASTE_NOOP(name)                           \
6452 static inline void gen_##name(DisasContext *ctx)          \
6453 {                                                         \
6454     /*                                                    \
6455      * Generate invalid exception until we have an        \
6456      * implementation of the copy paste facility          \
6457      */                                                   \
6458     gen_invalid(ctx);                                     \
6459 }
6460 
6461 GEN_CP_PASTE_NOOP(copy)
6462 GEN_CP_PASTE_NOOP(paste)
6463 
6464 static void gen_tcheck(DisasContext *ctx)
6465 {
6466     if (unlikely(!ctx->tm_enabled)) {
6467         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6468         return;
6469     }
6470     /*
6471      * Because tbegin always fails, the tcheck implementation is
6472      * simple:
6473      *
6474      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6475      *         = 0b1 || 0b00 || 0b0
6476      */
6477     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6478 }
6479 
6480 #if defined(CONFIG_USER_ONLY)
6481 #define GEN_TM_PRIV_NOOP(name)                                 \
6482 static inline void gen_##name(DisasContext *ctx)               \
6483 {                                                              \
6484     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);            \
6485 }
6486 
6487 #else
6488 
6489 #define GEN_TM_PRIV_NOOP(name)                                 \
6490 static inline void gen_##name(DisasContext *ctx)               \
6491 {                                                              \
6492     CHK_SV;                                                    \
6493     if (unlikely(!ctx->tm_enabled)) {                          \
6494         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6495         return;                                                \
6496     }                                                          \
6497     /*                                                         \
6498      * Because tbegin always fails, the implementation is      \
6499      * simple:                                                 \
6500      *                                                         \
6501      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
6502      *         = 0b0 || 0b00 || 0b0                            \
6503      */                                                        \
6504     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6505 }
6506 
6507 #endif
6508 
6509 GEN_TM_PRIV_NOOP(treclaim);
6510 GEN_TM_PRIV_NOOP(trechkpt);
6511 
6512 static inline void get_fpr(TCGv_i64 dst, int regno)
6513 {
6514     tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
6515 }
6516 
6517 static inline void set_fpr(int regno, TCGv_i64 src)
6518 {
6519     tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
6520 }
6521 
6522 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
6523 {
6524     tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
6525 }
6526 
6527 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
6528 {
6529     tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
6530 }
6531 
6532 /*
6533  * Helpers for decodetree used by !function for decoding arguments.
6534  */
6535 static int times_2(DisasContext *ctx, int x)
6536 {
6537     return x * 2;
6538 }
6539 
6540 static int times_4(DisasContext *ctx, int x)
6541 {
6542     return x * 4;
6543 }
6544 
6545 static int times_16(DisasContext *ctx, int x)
6546 {
6547     return x * 16;
6548 }
6549 
6550 /*
6551  * Helpers for trans_* functions to check for specific insns flags.
6552  * Use token pasting to ensure that we use the proper flag with the
6553  * proper variable.
6554  */
6555 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
6556     do {                                                \
6557         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
6558             return false;                               \
6559         }                                               \
6560     } while (0)
6561 
6562 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
6563     do {                                                \
6564         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
6565             return false;                               \
6566         }                                               \
6567     } while (0)
6568 
6569 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
6570 #if TARGET_LONG_BITS == 32
6571 # define REQUIRE_64BIT(CTX)  return false
6572 #else
6573 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
6574 #endif
6575 
6576 #define REQUIRE_VECTOR(CTX)                             \
6577     do {                                                \
6578         if (unlikely(!(CTX)->altivec_enabled)) {        \
6579             gen_exception((CTX), POWERPC_EXCP_VPU);     \
6580             return true;                                \
6581         }                                               \
6582     } while (0)
6583 
6584 #define REQUIRE_VSX(CTX)                                \
6585     do {                                                \
6586         if (unlikely(!(CTX)->vsx_enabled)) {            \
6587             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
6588             return true;                                \
6589         }                                               \
6590     } while (0)
6591 
6592 #define REQUIRE_FPU(ctx)                                \
6593     do {                                                \
6594         if (unlikely(!(ctx)->fpu_enabled)) {            \
6595             gen_exception((ctx), POWERPC_EXCP_FPU);     \
6596             return true;                                \
6597         }                                               \
6598     } while (0)
6599 
6600 /*
6601  * Helpers for implementing sets of trans_* functions.
6602  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6603  */
6604 #define TRANS(NAME, FUNC, ...) \
6605     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6606     { return FUNC(ctx, a, __VA_ARGS__); }
6607 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
6608     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6609     {                                                          \
6610         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
6611         return FUNC(ctx, a, __VA_ARGS__);                      \
6612     }
6613 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6614     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6615     {                                                          \
6616         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6617         return FUNC(ctx, a, __VA_ARGS__);                      \
6618     }
6619 
6620 #define TRANS64(NAME, FUNC, ...) \
6621     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6622     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
6623 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6624     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6625     {                                                          \
6626         REQUIRE_64BIT(ctx);                                    \
6627         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6628         return FUNC(ctx, a, __VA_ARGS__);                      \
6629     }
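/*
 * For illustration (hypothetical insn and helper names): a line such as
 *
 *     TRANS_FLAGS2(ISA310, FOO, do_foo, 1)
 *
 * expands to roughly
 *
 *     static bool trans_FOO(DisasContext *ctx, arg_FOO *a)
 *     {
 *         REQUIRE_INSNS_FLAGS2(ctx, ISA310);
 *         return do_foo(ctx, a, 1);
 *     }
 *
 * i.e. the insns_flags2 check runs before the shared helper is called.
 */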
6630 
6631 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6632 
6633 
6634 #include "decode-insn32.c.inc"
6635 #include "decode-insn64.c.inc"
6636 #include "power8-pmu-regs.c.inc"
6637 
6638 /*
6639  * Incorporate CIA into the constant when R=1.
6640  * Validate that when R=1, RA=0.
6641  */
6642 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6643 {
6644     d->rt = a->rt;
6645     d->ra = a->ra;
6646     d->si = a->si;
6647     if (a->r) {
6648         if (unlikely(a->ra != 0)) {
6649             gen_invalid(ctx);
6650             return false;
6651         }
6652         d->si += ctx->cia;
6653     }
6654     return true;
6655 }
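/*
 * Sketch of the effect for ISA v3.1 prefixed D-forms: with R=1 and
 * si=0x100 at CIA 0x2000, d->si becomes 0x2100 and, since RA must be 0,
 * the access is PC-relative; R=1 together with RA!=0 is rejected as an
 * invalid form above.
 */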
6656 
6657 #include "translate/fixedpoint-impl.c.inc"
6658 
6659 #include "translate/fp-impl.c.inc"
6660 
6661 #include "translate/vmx-impl.c.inc"
6662 
6663 #include "translate/vsx-impl.c.inc"
6664 
6665 #include "translate/dfp-impl.c.inc"
6666 
6667 #include "translate/spe-impl.c.inc"
6668 
6669 #include "translate/branch-impl.c.inc"
6670 
6671 /* Handles lfdp */
6672 static void gen_dform39(DisasContext *ctx)
6673 {
6674     if ((ctx->opcode & 0x3) == 0) {
6675         if (ctx->insns_flags2 & PPC2_ISA205) {
6676             return gen_lfdp(ctx);
6677         }
6678     }
6679     return gen_invalid(ctx);
6680 }
6681 
6682 /* Handles stfdp */
6683 static void gen_dform3D(DisasContext *ctx)
6684 {
6685     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6686         /* stfdp */
6687         if (ctx->insns_flags2 & PPC2_ISA205) {
6688             return gen_stfdp(ctx);
6689         }
6690     }
6691     return gen_invalid(ctx);
6692 }
6693 
6694 #if defined(TARGET_PPC64)
6695 /* brd */
6696 static void gen_brd(DisasContext *ctx)
6697 {
6698     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6699 }
6700 
6701 /* brw */
6702 static void gen_brw(DisasContext *ctx)
6703 {
6704     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6705     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6706 
6707 }
6708 
6709 /* brh */
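/*
 * brh byte-reverses each halfword of the doubleword.  Sketch of the mask
 * trick used below, with mask = 0x00ff00ff00ff00ff:
 *     t2 = (rs >> 8) & mask    high byte of each halfword moved down
 *     t1 = (rs & mask) << 8    low byte of each halfword moved up
 *     ra = t1 | t2
 */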
6710 static void gen_brh(DisasContext *ctx)
6711 {
6712     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6713     TCGv_i64 t1 = tcg_temp_new_i64();
6714     TCGv_i64 t2 = tcg_temp_new_i64();
6715 
6716     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6717     tcg_gen_and_i64(t2, t1, mask);
6718     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6719     tcg_gen_shli_i64(t1, t1, 8);
6720     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6721 
6722     tcg_temp_free_i64(t1);
6723     tcg_temp_free_i64(t2);
6724 }
6725 #endif
6726 
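/*
 * Legacy opcode table.  Each GEN_HANDLER*(name, opc1, opc2, opc3, inval,
 * type) entry routes a (sub-)opcode to gen_<name>(): "inval" is the mask
 * of instruction bits that must be zero, and "type" (plus "type2" for the
 * _E variants) names the insns_flags/insns_flags2 bits a CPU must have
 * for the entry to be registered.
 */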
6727 static opcode_t opcodes[] = {
6728 #if defined(TARGET_PPC64)
6729 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6730 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6731 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6732 #endif
6733 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6734 #if defined(TARGET_PPC64)
6735 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6736 #endif
6737 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6738 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6739 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6740 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6741 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6742 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6743 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6744 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6745 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6746 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6747 #if defined(TARGET_PPC64)
6748 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6749 #endif
6750 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6751 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6752 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6753 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6754 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6755 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6756 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6757 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6758 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6759 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6760 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6761 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6762 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6763 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6764 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6765 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6766 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6767 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6768 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6769 #if defined(TARGET_PPC64)
6770 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6771 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6772 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6773 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6774 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6775 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6776 #endif
6777 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6778 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6779 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6780 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6781 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6782 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6783 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6784 #if defined(TARGET_PPC64)
6785 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6786 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6787 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6788 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6789 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6790 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6791                PPC_NONE, PPC2_ISA300),
6792 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6793                PPC_NONE, PPC2_ISA300),
6794 #endif
6795 /* handles lfdp, lxsd, lxssp */
6796 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6797 /* handles stfdp, stxsd, stxssp */
6798 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6799 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6800 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6801 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6802 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6803 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6804 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6805 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6806 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6807 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6808 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6809 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6810 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6811 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6812 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6813 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6814 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6815 #if defined(TARGET_PPC64)
6816 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6817 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6818 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6819 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6820 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6821 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6822 #endif
6823 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6824 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
6825 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
6826 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6827 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6828 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6829 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6830 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6831 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6832 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6833 #if defined(TARGET_PPC64)
6834 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6835 #if !defined(CONFIG_USER_ONLY)
6836 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6837 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6838 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6839 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6840 #endif
6841 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6842 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6843 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6844 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6845 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6846 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6847 #endif
6848 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6849 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6850 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
6851 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6852 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6853 #if defined(TARGET_PPC64)
6854 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6855 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6856 #endif
6857 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6858 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6859 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6860 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6861 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6862 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6863 #if defined(TARGET_PPC64)
6864 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6865 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6866 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6867 #endif
6868 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6869 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6870 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6871 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6872 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6873 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6874 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6875 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6876 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6877 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6878 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6879 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6880 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6881 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6882 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6883 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6884 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6885 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6886 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6887 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6888 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6889 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6890 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6891 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6892 #if defined(TARGET_PPC64)
6893 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6894 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6895              PPC_SEGMENT_64B),
6896 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6897 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6898              PPC_SEGMENT_64B),
6899 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B),
6900 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B),
6901 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B),
6902 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B),
6903 #endif
6904 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6905 /*
6906  * XXX These instructions will need to be handled differently for
6907  * different ISA versions.
6908  */
6909 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE),
6910 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE),
6911 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300),
6912 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300),
6913 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6914 #if defined(TARGET_PPC64)
6915 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI),
6916 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI),
6917 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300),
6918 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6919 #endif
6920 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6921 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6922 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6923 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6924 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6925 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6926 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6927 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6928 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6929 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6930 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX),
6931 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX),
6932 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6933 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6934 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6935 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6936 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6937 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6938 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6939 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6940 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6941 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6942 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6943 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6944 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6945 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6946 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6947 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6948                PPC_NONE, PPC2_BOOKE206),
6949 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6950                PPC_NONE, PPC2_BOOKE206),
6951 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6952                PPC_NONE, PPC2_BOOKE206),
6953 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6954                PPC_NONE, PPC2_BOOKE206),
6955 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6956                PPC_NONE, PPC2_BOOKE206),
6957 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
6958                PPC_NONE, PPC2_PRCNTL),
6959 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
6960                PPC_NONE, PPC2_PRCNTL),
6961 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
6962                PPC_NONE, PPC2_PRCNTL),
6963 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6964 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6965 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6966 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6967               PPC_BOOKE, PPC2_BOOKE206),
6968 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6969 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6970                PPC_BOOKE, PPC2_BOOKE206),
6971 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6972              PPC_440_SPEC),
6973 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6974 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6975 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6976 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6977 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
6978 #if defined(TARGET_PPC64)
6979 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6980               PPC2_ISA300),
6981 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6982 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
6983                PPC_NONE, PPC2_ISA207S),
6984 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
6985                PPC_NONE, PPC2_ISA207S),
6986 #endif
6987 
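/*
 * From here on, the code-generation macros defined earlier in this file
 * are redefined so that the same invocation lists now emit opcode table
 * entries for the functions they previously generated.
 */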
6988 #undef GEN_INT_ARITH_ADD
6989 #undef GEN_INT_ARITH_ADD_CONST
6990 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6991 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6992 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6993                                 add_ca, compute_ca, compute_ov)               \
6994 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6995 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6996 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6997 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6998 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6999 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
7000 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
7001 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
7002 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
7003 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
7004 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
7005 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
7006 
7007 #undef GEN_INT_ARITH_DIVW
7008 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
7009 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
7010 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
7011 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
7012 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
7013 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
7014 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7015 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7016 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7017 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7018 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
7019 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
7020 
7021 #if defined(TARGET_PPC64)
7022 #undef GEN_INT_ARITH_DIVD
7023 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
7024 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
7025 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
7026 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
7027 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
7028 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
7029 
7030 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7031 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
7032 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7033 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
7034 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
7035 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
7036 
7037 #undef GEN_INT_ARITH_MUL_HELPER
7038 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
7039 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
7040 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
7041 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
7042 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
7043 #endif
7044 
7045 #undef GEN_INT_ARITH_SUBF
7046 #undef GEN_INT_ARITH_SUBF_CONST
7047 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
7048 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
7049 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
7050                                 add_ca, compute_ca, compute_ov)               \
7051 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
7052 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
7053 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
7054 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
7055 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
7056 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
7057 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
7058 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
7059 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
7060 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
7061 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
7062 
7063 #undef GEN_LOGICAL1
7064 #undef GEN_LOGICAL2
7065 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
7066 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
7067 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
7068 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
7069 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
7070 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
7071 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
7072 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
7073 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
7074 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
7075 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
7076 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
7077 #if defined(TARGET_PPC64)
7078 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
7079 #endif
7080 
7081 #if defined(TARGET_PPC64)
7082 #undef GEN_PPC64_R2
7083 #undef GEN_PPC64_R4
7084 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
7085 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7086 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7087              PPC_64B)
7088 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
7089 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7090 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
7091              PPC_64B),                                                        \
7092 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7093              PPC_64B),                                                        \
7094 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
7095              PPC_64B)
7096 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
7097 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
7098 GEN_PPC64_R4(rldic, 0x1E, 0x04),
7099 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
7100 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
7101 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
7102 #endif
7103 
7104 #undef GEN_LDX_E
7105 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
7106 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
7107 
7108 #if defined(TARGET_PPC64)
7109 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
7110 
7111 /* HV/P7 and later only */
7112 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
7113 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
7114 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
7115 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
7116 #endif
7117 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
7118 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
7119 
7120 /* External PID based load */
7121 #undef GEN_LDEPX
7122 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
7123 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7124               0x00000001, PPC_NONE, PPC2_BOOKE206),
7125 
7126 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
7127 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
7128 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
7129 #if defined(TARGET_PPC64)
7130 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
7131 #endif
7132 
7133 #undef GEN_STX_E
7134 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
7135 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
7136 
7137 #if defined(TARGET_PPC64)
7138 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
7139 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
7140 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
7141 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
7142 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
7143 #endif
7144 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
7145 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
7146 
7147 #undef GEN_STEPX
7148 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
7149 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7150               0x00000001, PPC_NONE, PPC2_BOOKE206),
7151 
7152 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
7153 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
7154 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
7155 #if defined(TARGET_PPC64)
7156 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
7157 #endif
7158 
7159 #undef GEN_CRLOGIC
7160 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
7161 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
7162 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
7163 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
7164 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
7165 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
7166 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
7167 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
7168 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
7169 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
7170 
7171 #undef GEN_MAC_HANDLER
7172 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
7173 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
7174 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
7175 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
7176 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
7177 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
7178 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
7179 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
7180 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
7181 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
7182 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
7183 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
7184 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
7185 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
7186 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
7187 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
7188 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
7189 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
7190 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
7191 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
7192 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
7193 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
7194 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
7195 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
7196 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
7197 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
7198 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
7199 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
7200 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
7201 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
7202 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
7203 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
7204 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
7205 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
7206 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
7207 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
7208 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
7209 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
7210 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
7211 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
7212 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
7213 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
7214 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
7215 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
7216 
7217 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
7218                PPC_NONE, PPC2_TM),
7219 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
7220                PPC_NONE, PPC2_TM),
7221 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
7222                PPC_NONE, PPC2_TM),
7223 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
7224                PPC_NONE, PPC2_TM),
7225 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
7226                PPC_NONE, PPC2_TM),
7227 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
7228                PPC_NONE, PPC2_TM),
7229 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
7230                PPC_NONE, PPC2_TM),
7231 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
7232                PPC_NONE, PPC2_TM),
7233 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
7234                PPC_NONE, PPC2_TM),
7235 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
7236                PPC_NONE, PPC2_TM),
7237 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
7238                PPC_NONE, PPC2_TM),
7239 
7240 #include "translate/fp-ops.c.inc"
7241 
7242 #include "translate/vmx-ops.c.inc"
7243 
7244 #include "translate/vsx-ops.c.inc"
7245 
7246 #include "translate/spe-ops.c.inc"
7247 };
7248 
7249 /*****************************************************************************/
7250 /* Opcode types */
7251 enum {
7252     PPC_DIRECT   = 0, /* Opcode routine        */
7253     PPC_INDIRECT = 1, /* Indirect opcode table */
7254 };
7255 
7256 #define PPC_OPCODE_MASK 0x3
7257 
7258 static inline int is_indirect_opcode(void *handler)
7259 {
7260     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
7261 }
7262 
7263 static inline opc_handler_t **ind_table(void *handler)
7264 {
7265     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
7266 }
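/*
 * Direct handlers and indirect sub-tables share the same slot type: the
 * low bits of the stored pointer are used as a tag.  opc_handler_t
 * pointers are sufficiently aligned that PPC_INDIRECT can be OR'ed in
 * when a slot holds a sub-table and masked back off by ind_table().
 */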
7267 
7268 /* Instruction table creation */
7269 /* Opcode tables creation */
7270 static void fill_new_table(opc_handler_t **table, int len)
7271 {
7272     int i;
7273 
7274     for (i = 0; i < len; i++) {
7275         table[i] = &invalid_handler;
7276     }
7277 }
7278 
7279 static int create_new_table(opc_handler_t **table, unsigned char idx)
7280 {
7281     opc_handler_t **tmp;
7282 
7283     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
7284     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
7285     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
7286 
7287     return 0;
7288 }
7289 
7290 static int insert_in_table(opc_handler_t **table, unsigned char idx,
7291                             opc_handler_t *handler)
7292 {
7293     if (table[idx] != &invalid_handler) {
7294         return -1;
7295     }
7296     table[idx] = handler;
7297 
7298     return 0;
7299 }
7300 
7301 static int register_direct_insn(opc_handler_t **ppc_opcodes,
7302                                 unsigned char idx, opc_handler_t *handler)
7303 {
7304     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
7305         printf("*** ERROR: opcode %02x already assigned in main "
7306                "opcode table\n", idx);
7307         return -1;
7308     }
7309 
7310     return 0;
7311 }
7312 
7313 static int register_ind_in_table(opc_handler_t **table,
7314                                  unsigned char idx1, unsigned char idx2,
7315                                  opc_handler_t *handler)
7316 {
7317     if (table[idx1] == &invalid_handler) {
7318         if (create_new_table(table, idx1) < 0) {
7319             printf("*** ERROR: unable to create indirect table "
7320                    "idx=%02x\n", idx1);
7321             return -1;
7322         }
7323     } else {
7324         if (!is_indirect_opcode(table[idx1])) {
7325             printf("*** ERROR: idx %02x already assigned to a direct "
7326                    "opcode\n", idx1);
7327             return -1;
7328         }
7329     }
7330     if (handler != NULL &&
7331         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
7332         printf("*** ERROR: opcode %02x already assigned in "
7333                "opcode table %02x\n", idx2, idx1);
7334         return -1;
7335     }
7336 
7337     return 0;
7338 }
7339 
7340 static int register_ind_insn(opc_handler_t **ppc_opcodes,
7341                              unsigned char idx1, unsigned char idx2,
7342                              opc_handler_t *handler)
7343 {
7344     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
7345 }
7346 
7347 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
7348                                 unsigned char idx1, unsigned char idx2,
7349                                 unsigned char idx3, opc_handler_t *handler)
7350 {
7351     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7352         printf("*** ERROR: unable to join indirect table idx "
7353                "[%02x-%02x]\n", idx1, idx2);
7354         return -1;
7355     }
7356     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
7357                               handler) < 0) {
7358         printf("*** ERROR: unable to insert opcode "
7359                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7360         return -1;
7361     }
7362 
7363     return 0;
7364 }
7365 
7366 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
7367                                  unsigned char idx1, unsigned char idx2,
7368                                  unsigned char idx3, unsigned char idx4,
7369                                  opc_handler_t *handler)
7370 {
7371     opc_handler_t **table;
7372 
7373     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7374         printf("*** ERROR: unable to join indirect table idx "
7375                "[%02x-%02x]\n", idx1, idx2);
7376         return -1;
7377     }
7378     table = ind_table(ppc_opcodes[idx1]);
7379     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
7380         printf("*** ERROR: unable to join 2nd-level indirect table idx "
7381                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7382         return -1;
7383     }
7384     table = ind_table(table[idx2]);
7385     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
7386         printf("*** ERROR: unable to insert opcode "
7387                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
7388         return -1;
7389     }
7390     return 0;
7391 }
7392 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7393 {
7394     if (insn->opc2 != 0xFF) {
7395         if (insn->opc3 != 0xFF) {
7396             if (insn->opc4 != 0xFF) {
7397                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7398                                           insn->opc3, insn->opc4,
7399                                           &insn->handler) < 0) {
7400                     return -1;
7401                 }
7402             } else {
7403                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7404                                          insn->opc3, &insn->handler) < 0) {
7405                     return -1;
7406                 }
7407             }
7408         } else {
7409             if (register_ind_insn(ppc_opcodes, insn->opc1,
7410                                   insn->opc2, &insn->handler) < 0) {
7411                 return -1;
7412             }
7413         }
7414     } else {
7415         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7416             return -1;
7417         }
7418     }
7419 
7420     return 0;
7421 }
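/*
 * An opc2/opc3/opc4 value of 0xFF in the opcode_t means "level not used",
 * so the checks above select direct, single, double or triple indirection
 * on a per-instruction basis.
 */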
7422 
7423 static int test_opcode_table(opc_handler_t **table, int len)
7424 {
7425     int i, count, tmp;
7426 
7427     for (i = 0, count = 0; i < len; i++) {
7428         /* Consistency fixup */
7429         if (table[i] == NULL) {
7430             table[i] = &invalid_handler;
7431         }
7432         if (table[i] != &invalid_handler) {
7433             if (is_indirect_opcode(table[i])) {
7434                 tmp = test_opcode_table(ind_table(table[i]),
7435                     PPC_CPU_INDIRECT_OPCODES_LEN);
7436                 if (tmp == 0) {
7437                     free(table[i]);
7438                     table[i] = &invalid_handler;
7439                 } else {
7440                     count++;
7441                 }
7442             } else {
7443                 count++;
7444             }
7445         }
7446     }
7447 
7448     return count;
7449 }
7450 
7451 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7452 {
7453     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7454         printf("*** WARNING: no opcode defined!\n");
7455     }
7456 }
7457 
7458 /*****************************************************************************/
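/*
 * Build the per-CPU opcode tables: only entries whose type/type2 flags
 * intersect the CPU class's insns_flags/insns_flags2 are registered, and
 * sub-tables that end up empty are collapsed again by fix_opcode_tables().
 */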
7459 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7460 {
7461     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7462     opcode_t *opc;
7463 
7464     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7465     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7466         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7467             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7468             if (register_insn(cpu->opcodes, opc) < 0) {
7469                 error_setg(errp, "ERROR initializing PowerPC instruction "
7470                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7471                            opc->opc3);
7472                 return;
7473             }
7474         }
7475     }
7476     fix_opcode_tables(cpu->opcodes);
7477     fflush(stdout);
7478     fflush(stderr);
7479 }
7480 
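/* Free the up to three levels of indirect tables built by create_ppc_opcodes(). */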
7481 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7482 {
7483     opc_handler_t **table, **table_2;
7484     int i, j, k;
7485 
7486     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7487         if (cpu->opcodes[i] == &invalid_handler) {
7488             continue;
7489         }
7490         if (is_indirect_opcode(cpu->opcodes[i])) {
7491             table = ind_table(cpu->opcodes[i]);
7492             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7493                 if (table[j] == &invalid_handler) {
7494                     continue;
7495                 }
7496                 if (is_indirect_opcode(table[j])) {
7497                     table_2 = ind_table(table[j]);
7498                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7499                         if (table_2[k] != &invalid_handler &&
7500                             is_indirect_opcode(table_2[k])) {
7501                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7502                                                      ~PPC_INDIRECT));
7503                         }
7504                     }
7505                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7506                                              ~PPC_INDIRECT));
7507                 }
7508             }
7509             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7510                 ~PPC_INDIRECT));
7511         }
7512     }
7513 }
7514 
7515 int ppc_fixup_cpu(PowerPCCPU *cpu)
7516 {
7517     CPUPPCState *env = &cpu->env;
7518 
7519     /*
7520      * TCG doesn't (yet) emulate some groups of instructions that are
7521      * implemented on some otherwise supported CPUs (e.g. VSX and
7522      * decimal floating point instructions on POWER7).  We remove
7523      * unsupported instruction groups from the cpu state's instruction
7524      * masks and hope the guest can cope.  For at least the pseries
7525      * machine, the unavailability of these instructions can be
7526      * advertised to the guest via the device tree.
7527      */
7528     if ((env->insns_flags & ~PPC_TCG_INSNS)
7529         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7530         warn_report("Disabling some instructions which are not "
7531                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7532                     env->insns_flags & ~PPC_TCG_INSNS,
7533                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7534     }
7535     env->insns_flags &= PPC_TCG_INSNS;
7536     env->insns_flags2 &= PPC_TCG_INSNS2;
7537     return 0;
7538 }
7539 
7540 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
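/*
 * Fallback decoder for instructions not (yet) converted to decodetree:
 * walk the per-CPU opcode tables by opc1/opc2/opc3/opc4, then reject
 * encodings that set bits in the handler's inval mask.
 */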
7541 {
7542     opc_handler_t **table, *handler;
7543     uint32_t inval;
7544 
7545     ctx->opcode = insn;
7546 
7547     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7548               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7549               ctx->le_mode ? "little" : "big");
7550 
7551     table = cpu->opcodes;
7552     handler = table[opc1(insn)];
7553     if (is_indirect_opcode(handler)) {
7554         table = ind_table(handler);
7555         handler = table[opc2(insn)];
7556         if (is_indirect_opcode(handler)) {
7557             table = ind_table(handler);
7558             handler = table[opc3(insn)];
7559             if (is_indirect_opcode(handler)) {
7560                 table = ind_table(handler);
7561                 handler = table[opc4(insn)];
7562             }
7563         }
7564     }
7565 
7566     /* Is opcode *REALLY* valid? */
7567     if (unlikely(handler->handler == &gen_invalid)) {
7568         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7569                       "%02x - %02x - %02x - %02x (%08x) "
7570                       TARGET_FMT_lx "\n",
7571                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7572                       insn, ctx->cia);
7573         return false;
7574     }
7575 
7576     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7577                  && Rc(insn))) {
7578         inval = handler->inval2;
7579     } else {
7580         inval = handler->inval1;
7581     }
7582 
7583     if (unlikely((insn & inval) != 0)) {
7584         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7585                       "%02x - %02x - %02x - %02x (%08x) "
7586                       TARGET_FMT_lx "\n", insn & inval,
7587                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7588                       insn, ctx->cia);
7589         return false;
7590     }
7591 
7592     handler->handler(ctx);
7593     return true;
7594 }
7595 
7596 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
7597 {
7598     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7599     CPUPPCState *env = cs->env_ptr;
7600     uint32_t hflags = ctx->base.tb->flags;
7601 
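    /* The HFLAGS_* bits are packed into tb->flags by hreg_compute_hflags(). */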
7602     ctx->spr_cb = env->spr_cb;
7603     ctx->pr = (hflags >> HFLAGS_PR) & 1;
7604     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
7605     ctx->dr = (hflags >> HFLAGS_DR) & 1;
7606     ctx->hv = (hflags >> HFLAGS_HV) & 1;
7607     ctx->insns_flags = env->insns_flags;
7608     ctx->insns_flags2 = env->insns_flags2;
7609     ctx->access_type = -1;
7610     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
7611     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
7612     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
7613     ctx->flags = env->flags;
7614 #if defined(TARGET_PPC64)
7615     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
7616     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7617 #endif
7618     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
7619         || env->mmu_model & POWERPC_MMU_64;
7620 
7621     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
7622     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
7623     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
7624     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
7625     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
7626     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
7627     ctx->hr = (hflags >> HFLAGS_HR) & 1;
7628     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
7629     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
7630     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
7631 
7632     ctx->singlestep_enabled = 0;
7633     if ((hflags >> HFLAGS_SE) & 1) {
7634         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
7635         ctx->base.max_insns = 1;
7636     }
7637     if ((hflags >> HFLAGS_BE) & 1) {
7638         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
7639     }
7640 }
7641 
7642 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
7643 {
7644 }
7645 
7646 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
7647 {
7648     tcg_gen_insn_start(dcbase->pc_next);
7649 }
7650 
7651 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
7652 {
7653     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
7654     return opc1(insn) == 1;
7655 }
7656 
7657 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
7658 {
7659     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7660     PowerPCCPU *cpu = POWERPC_CPU(cs);
7661     CPUPPCState *env = cs->env_ptr;
7662     target_ulong pc;
7663     uint32_t insn;
7664     bool ok;
7665 
7666     LOG_DISAS("----------------\n");
7667     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7668               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
7669 
7670     ctx->cia = pc = ctx->base.pc_next;
7671     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
7672     ctx->base.pc_next = pc += 4;
7673 
7674     if (!is_prefix_insn(ctx, insn)) {
7675         ok = (decode_insn32(ctx, insn) ||
7676               decode_legacy(cpu, ctx, insn));
7677     } else if ((pc & 63) == 0) {
7678         /*
7679          * Power v3.1, section 1.9 Exceptions:
7680          * attempt to execute a prefixed instruction that crosses a
7681          * 64-byte address boundary (system alignment error).
7682          */
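        /* pc has already been advanced past the prefix word at this point. */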
7683         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
7684         ok = true;
7685     } else {
7686         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
7687                                              need_byteswap(ctx));
7688         ctx->base.pc_next = pc += 4;
7689         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
7690     }
7691     if (!ok) {
7692         gen_invalid(ctx);
7693     }
7694 
7695     /* End the TB when crossing a page boundary. */
7696     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
7697         ctx->base.is_jmp = DISAS_TOO_MANY;
7698     }
7699 
7700     translator_loop_temp_check(&ctx->base);
7701 }
7702 
7703 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7704 {
7705     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7706     DisasJumpType is_jmp = ctx->base.is_jmp;
7707     target_ulong nip = ctx->base.pc_next;
7708 
7709     if (is_jmp == DISAS_NORETURN) {
7710         /* We have already exited the TB. */
7711         return;
7712     }
7713 
7714     /* Honor single stepping. */
7715     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7716         && (nip <= 0x100 || nip > 0xf00)) {
7717         switch (is_jmp) {
7718         case DISAS_TOO_MANY:
7719         case DISAS_EXIT_UPDATE:
7720         case DISAS_CHAIN_UPDATE:
7721             gen_update_nip(ctx, nip);
7722             break;
7723         case DISAS_EXIT:
7724         case DISAS_CHAIN:
7725             break;
7726         default:
7727             g_assert_not_reached();
7728         }
7729 
7730         gen_debug_exception(ctx);
7731         return;
7732     }
7733 
7734     switch (is_jmp) {
7735     case DISAS_TOO_MANY:
7736         if (use_goto_tb(ctx, nip)) {
7737             pmu_count_insns(ctx);
7738             tcg_gen_goto_tb(0);
7739             gen_update_nip(ctx, nip);
7740             tcg_gen_exit_tb(ctx->base.tb, 0);
7741             break;
7742         }
7743         /* fall through */
7744     case DISAS_CHAIN_UPDATE:
7745         gen_update_nip(ctx, nip);
7746         /* fall through */
7747     case DISAS_CHAIN:
7748         /*
7749          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7750          * CF_NO_GOTO_PTR is set. Count insns now.
7751          */
7752         if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
7753             pmu_count_insns(ctx);
7754         }
7755 
7756         tcg_gen_lookup_and_goto_ptr();
7757         break;
7758 
7759     case DISAS_EXIT_UPDATE:
7760         gen_update_nip(ctx, nip);
7761         /* fall through */
7762     case DISAS_EXIT:
7763         pmu_count_insns(ctx);
7764         tcg_gen_exit_tb(NULL, 0);
7765         break;
7766 
7767     default:
7768         g_assert_not_reached();
7769     }
7770 }
7771 
7772 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
7773 {
7774     qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
7775     log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
7776 }
7777 
7778 static const TranslatorOps ppc_tr_ops = {
7779     .init_disas_context = ppc_tr_init_disas_context,
7780     .tb_start           = ppc_tr_tb_start,
7781     .insn_start         = ppc_tr_insn_start,
7782     .translate_insn     = ppc_tr_translate_insn,
7783     .tb_stop            = ppc_tr_tb_stop,
7784     .disas_log          = ppc_tr_disas_log,
7785 };
7786 
7787 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
7788 {
7789     DisasContext ctx;
7790 
7791     translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
7792 }
7793 
7794 void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
7795                           target_ulong *data)
7796 {
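    /* data[0] is the instruction address recorded by ppc_tr_insn_start(). */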
7797     env->nip = data[0];
7798 }
7799