xref: /openbmc/qemu/target/ppc/translate.c (revision f3b2e38c)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 
40 #include "qemu/qemu-print.h"
41 #include "qapi/error.h"
42 
43 #define CPU_SINGLE_STEP 0x1
44 #define CPU_BRANCH_STEP 0x2
45 
46 /* Include definitions for instructions classes and implementations flags */
47 /* #define PPC_DEBUG_DISAS */
48 
49 #ifdef PPC_DEBUG_DISAS
50 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
51 #else
52 #  define LOG_DISAS(...) do { } while (0)
53 #endif
54 /*****************************************************************************/
55 /* Code translation helpers                                                  */
56 
57 /* global register indexes */
/* Backing storage for the TCG global names created in ppc_translate_init():
 * "r0".."r31" (3 or 4 bytes each), "r0H".."r31H" SPE highs (4 or 5 bytes),
 * and "crf0".."crf7" (5 bytes each), all including the NUL terminator. */
static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5           /* CRF */];
static TCGv cpu_gpr[32];        /* general purpose registers */
static TCGv cpu_gprh[32];       /* SPE upper halves of the GPRs */
static TCGv_i32 cpu_crf[8];     /* condition register fields */
static TCGv cpu_nip;            /* next instruction pointer */
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
/* XER and its flag bits, tracked separately for cheap flag updates */
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;        /* lwarx/stwcx. reservation address */
static TCGv cpu_reserve_val;    /* value loaded when reservation was made */
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;
76 
77 #include "exec/gen-icount.h"
78 
/*
 * Allocate the TCG globals that mirror CPUPPCState fields and give each
 * a stable, human-readable name.  The names live in cpu_reg_names, which
 * is carved up with pointer arithmetic; the increments below must match
 * the exact formatted lengths (including the NUL) or names would overlap.
 */
void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    /* "crf0".."crf7": always 5 bytes including NUL */
    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    /* "rN"/"rNH": length depends on whether N has one or two digits */
    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    /* XER plus the individually-tracked flag bits */
    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_val),
                                     "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}
153 
154 /* internal defines */
/* Per-translation-block state shared by all the gen_* routines below. */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;  /* current instruction address */
    uint32_t opcode;   /* raw opcode of the instruction being translated */
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;   /* MSR-derived mode bits */
    bool lazy_tlb_flush;
    bool need_access_type;      /* target keeps env->access_type up to date */
    int mem_idx;
    int access_type;            /* last value written to cpu_access_type */
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;               /* 64-bit mode (see NARROW_MODE) */
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    bool hr;
    bool mmcr0_pmcc0;
    bool mmcr0_pmcc1;
    bool pmu_insn_cnt;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;     /* CPU_SINGLE_STEP / CPU_BRANCH_STEP bits */
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};
187 
188 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
189 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
190 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
191 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
192 
193 /* Return true iff byteswap is needed in a scalar memop */
/*
 * A byteswap is needed whenever the guest's current endianness (MSR[LE])
 * differs from the target's compiled-in default endianness.
 */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if TARGET_BIG_ENDIAN
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}
202 
203 /* True when active word size < size of target_long.  */
204 #ifdef TARGET_PPC64
205 # define NARROW_MODE(C)  (!(C)->sf_mode)
206 #else
207 # define NARROW_MODE(C)  0
208 #endif
209 
/* Descriptor for one legacy-decoder opcode entry. */
struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};
222 
223 /* SPR load/store helpers */
/* Load SPR @reg from env->spr[] into TCG value @t. */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

/* Store TCG value @t into env->spr[@reg]. */
static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}
233 
234 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
235 {
236     if (ctx->need_access_type && ctx->access_type != access_type) {
237         tcg_gen_movi_i32(cpu_access_type, access_type);
238         ctx->access_type = access_type;
239     }
240 }
241 
242 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
243 {
244     if (NARROW_MODE(ctx)) {
245         nip = (uint32_t)nip;
246     }
247     tcg_gen_movi_tl(cpu_nip, nip);
248 }
249 
/*
 * Raise exception @excp with error code @error.  The PC is rewound to the
 * faulting instruction and translation of this TB ends (DISAS_NORETURN).
 */
static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /*
     * These are all synchronous exceptions, we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* As gen_exception_err() but for exceptions that carry no error code. */
static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /*
     * These are all synchronous exceptions, we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* Raise exception @excp with the PC set to an explicit address @nip. */
static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}
293 
/*
 * Prepare for an instruction that performs I/O when icount is enabled:
 * start the I/O window and mark this instruction as the last in the TB.
 */
static void gen_icount_io_start(DisasContext *ctx)
{
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
        /*
         * An I/O instruction must be last in the TB.
         * Chain to the next TB, and let the code from gen_tb_start
         * decide if we need to return to the main loop.
         * Doing this first also allows this value to be overridden.
         */
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}
307 
308 /*
309  * Tells the caller what is the appropriate exception to generate and prepares
310  * SPR registers for this exception.
311  *
312  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
313  * POWERPC_EXCP_DEBUG (on BookE).
314  */
315 static uint32_t gen_prep_dbgex(DisasContext *ctx)
316 {
317     if (ctx->flags & POWERPC_FLAG_DE) {
318         target_ulong dbsr = 0;
319         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
320             dbsr = DBCR0_ICMP;
321         } else {
322             /* Must have been branch */
323             dbsr = DBCR0_BRT;
324         }
325         TCGv t0 = tcg_temp_new();
326         gen_load_spr(t0, SPR_BOOKE_DBSR);
327         tcg_gen_ori_tl(t0, t0, dbsr);
328         gen_store_spr(SPR_BOOKE_DBSR, t0);
329         tcg_temp_free(t0);
330         return POWERPC_EXCP_DEBUG;
331     } else {
332         return POWERPC_EXCP_TRACE;
333     }
334 }
335 
/* Raise the debug/trace exception selected by gen_prep_dbgex(). */
static void gen_debug_exception(DisasContext *ctx)
{
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* Raise an invalid-instruction error. */
static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

/* Raise a privilege violation as a program check. */
static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

/* Raise a hypervisor privilege violation. */
static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}
358 
359 /*****************************************************************************/
360 /* SPR READ/WRITE CALLBACKS */
361 
/*
 * Callback for SPRs with no access: the access is silently ignored.
 * The diagnostic printf below is compiled out.
 */
void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
369 
370 /* #define PPC_DUMP_SPR_ACCESSES */
371 
372 /*
373  * Generic callbacks:
374  * do nothing but store/retrieve spr value
375  */
/* Trace an SPR read; no-op unless PPC_DUMP_SPR_ACCESSES is defined. */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_load_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

/* Generic SPR read: copy env->spr[sprn] into rD. */
void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

/* Trace an SPR write; no-op unless PPC_DUMP_SPR_ACCESSES is defined. */
static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_store_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

/* Generic SPR write: copy rS into env->spr[sprn]. */
void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}
405 
/* Write CTRL, then end the TB so the PMU sees the run-latch change. */
void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
{
    spr_write_generic(ctx, sprn, gprn);

    /*
     * SPR_CTRL writes must force a new translation block,
     * allowing the PMU to calculate the run latch events with
     * more accuracy.
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
417 
418 #if !defined(CONFIG_USER_ONLY)
/* Generic write for 32-bit SPRs: on 64-bit targets only the low 32 bits
 * of rS are stored; on 32-bit targets this is a plain generic write. */
void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

/* Write handler that clears SPR bits based on rS.
 * NOTE(review): this uses arithmetic negation (neg) of rS as the AND mask,
 * not bitwise complement — verify against the intended
 * "write-one-to-clear" semantics of the SPRs that use this callback. */
void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

/* Access handler that deliberately does nothing. */
void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}
447 
448 #endif
449 
450 /* SPR common to all PowerPC */
451 /* XER */
/*
 * Read XER: recombine the separately-tracked SO/OV/CA (and, on ISA 3.00+,
 * OV32/CA32) flag bits into the architected XER value in rD.
 */
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/*
 * Write XER: the flag bits are split back out into their dedicated
 * globals, and everything else is kept in cpu_xer.
 */
void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}
490 
491 /* LR */
/* Read the link register into rD. */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

/* Write rS into the link register. */
void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

/* CFAR */
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* Read the come-from address register into rD. */
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

/* Write rS into the come-from address register. */
void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
/* Read the count register into rD. */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

/* Write rS into the count register. */
void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}
525 
526 /* User read access to SPR */
527 /* USPRx */
528 /* UMMCRx */
529 /* UPMCx */
530 /* USIA */
531 /* UDECR */
/* Read a user-mode alias SPR: the privileged SPR sits 0x10 higher. */
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* Write through a user-mode alias SPR to its privileged counterpart. */
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
542 #endif
543 
544 /* SPR common to all non-embedded PowerPC */
545 /* DECR */
546 #if !defined(CONFIG_USER_ONLY)
/* Read the decrementer; timer access is I/O for icount purposes. */
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
}

/* Write the decrementer (may arm/disarm the DECR timer). */
void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
}
558 #endif
559 
560 /* SPR common to all non-embedded PowerPC, except 601 */
561 /* Time base */
/* Read time-base lower half; time base is I/O for icount purposes. */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
}

/* Read time-base upper half. */
void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
}

/* Read alternate time-base lower half. */
void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
}

/* Read alternate time-base upper half. */
void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
/* Write time-base lower half. */
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

/* Write time-base upper half. */
void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

/* Write alternate time-base lower half. */
void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

/* Write alternate time-base upper half. */
void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}
606 
607 #if defined(TARGET_PPC64)
/* Read the processor utilization resource register (timer-backed). */
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

/* Write the processor utilization resource register. */
void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
/* Read the hypervisor decrementer. */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

/* Write the hypervisor decrementer. */
void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

/* Read the virtual time base. */
void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

/* Write the virtual time base. */
void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

/* Write the upper 40 bits of the time base (TBU40). */
void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}
650 
651 #endif
652 #endif
653 
654 #if !defined(CONFIG_USER_ONLY)
655 /* IBAT0U...IBAT0U */
656 /* IBAT0L...IBAT7L */
/* Read IBAT0..3 U/L: (sprn & 1) selects upper/lower, the quotient the pair. */
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

/* Read the high IBAT bank (IBAT4..7). */
void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

/* Write IBAT0..3 upper word via helper (updates derived MMU state). */
void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* Write IBAT4..7 upper word. */
void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* Write IBAT0..3 lower word. */
void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* Write IBAT4..7 lower word. */
void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
698 
699 /* DBAT0U...DBAT7U */
700 /* DBAT0L...DBAT7L */
/* Read DBAT0..3 U/L, mirroring the IBAT indexing scheme above. */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

/* Read the high DBAT bank (DBAT4..7). */
void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

/* Write DBAT0..3 upper word via helper (updates derived MMU state). */
void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* Write DBAT4..7 upper word. */
void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* Write DBAT0..3 lower word. */
void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* Write DBAT4..7 lower word. */
void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
742 
743 /* SDR1 */
/* Write SDR1 (hashed page table base) via helper. */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
}
748 
749 #if defined(TARGET_PPC64)
750 /* 64 bits PowerPC specific SPRs */
751 /* PIDR */
/* Write PIDR via helper (may require TLB maintenance). */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
}

/* Write LPIDR via helper. */
void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
}

/* HIOR is backed by env->excp_prefix. */
void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
}

/* Write HIOR: only the architected address bits are kept. */
void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    tcg_temp_free(t0);
}
/* Write the partition table control register. */
void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
}

/* Write the processor compatibility register. */
void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
}

/* DPDES */
/* Read the directed privileged doorbell exception state. */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
}

/* Write the directed privileged doorbell exception state. */
void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
}
794 #endif
795 #endif
796 
797 /* PowerPC 40x specific registers */
798 #if !defined(CONFIG_USER_ONLY)
/* Read the 40x programmable interval timer (timer-backed, icount I/O). */
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
}

/* Write the 40x programmable interval timer. */
void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
}

/* Write 40x DBCR0; the helper can reset the CPU, so end the TB. */
void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

/* Write the 40x storage little-endian register. */
void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
}

/* Write the 40x timer control register. */
void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
}

/* Write the 40x timer status register. */
void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
}

/* Write the 40x PID; only the low 8 bits are architected. */
void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
    gen_helper_store_40x_pid(cpu_env, t0);
    tcg_temp_free(t0);
}

/* Write the BookE timer control register. */
void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
}

/* Write the BookE timer status register. */
void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
}
857 #endif
858 
859 /* PIR */
860 #if !defined(CONFIG_USER_ONLY)
/* Write PIR; only the low 4 bits are kept. */
void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
    tcg_temp_free(t0);
}
868 #endif
869 
870 /* SPE specific registers */
/* Read SPEFSCR from its 32-bit env field, zero-extended into rD. */
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
    tcg_temp_free_i32(t0);
}

/* Write SPEFSCR, truncating rS to 32 bits. */
void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_temp_free_i32(t0);
}
886 
887 #if !defined(CONFIG_USER_ONLY)
888 /* Callback used to write the exception vector base */
/* Callback used to write the exception vector base */
/* Masks rS with ivpr_mask, then updates both excp_prefix and the SPR. */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

/* Write a BookE IVORn: map the SPR number onto the excp_vectors[] slot,
 * rejecting SPRs outside the three architected IVOR ranges. */
void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
{
    int sprn_offs;

    if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
        sprn_offs = sprn - SPR_BOOKE_IVOR0;
    } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
        sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
    } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
        sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
    } else {
        qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
                      " vector 0x%03x\n", sprn);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
923 #endif
924 
925 #ifdef TARGET_PPC64
926 #ifndef CONFIG_USER_ONLY
/*
 * Write AMR: only the bits permitted by the relevant authority mask
 * (UAMOR in problem state, AMOR otherwise) are inserted; the remaining
 * AMR bits are preserved.
 */
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Write UAMOR: same masked-insert scheme, gated by AMOR. */
void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Write IAMR: same masked-insert scheme, gated by AMOR. */
void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
1023 #endif
1024 #endif
1025 
1026 #ifndef CONFIG_USER_ONLY
/* Read a THRM register, letting the helper fix up its value first. */
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
1033 #endif /* !CONFIG_USER_ONLY */
1034 
1035 #if !defined(CONFIG_USER_ONLY)
1036 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1037 {
1038     TCGv t0 = tcg_temp_new();
1039 
1040     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1041     gen_store_spr(sprn, t0);
1042     tcg_temp_free(t0);
1043 }
1044 
1045 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1046 {
1047     TCGv t0 = tcg_temp_new();
1048 
1049     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1050     gen_store_spr(sprn, t0);
1051     tcg_temp_free(t0);
1052 }
1053 
1054 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1055 {
1056     TCGv t0 = tcg_temp_new();
1057 
1058     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1059                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1060     gen_store_spr(sprn, t0);
1061     tcg_temp_free(t0);
1062 }
1063 
/* MMUCSR0 write: the helper performs the requested TLB flush. */
void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}
1068 
1069 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1070 {
1071     TCGv_i32 t0 = tcg_const_i32(sprn);
1072     gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1073     tcg_temp_free_i32(t0);
1074 }
/* EPLC write: delegated to its helper. */
void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}
/* EPSC write: delegated to its helper. */
void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}
1083 
1084 #endif
1085 
1086 #if !defined(CONFIG_USER_ONLY)
/* MAS7_MAS3 write: split the 64-bit value across MAS3 (low) and MAS7 (high). */
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    /* low 32 bits -> MAS3 */
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    /* high 32 bits -> MAS7 */
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}
1096 
1097 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1098 {
1099     TCGv mas7 = tcg_temp_new();
1100     TCGv mas3 = tcg_temp_new();
1101     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1102     tcg_gen_shli_tl(mas7, mas7, 32);
1103     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1104     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1105     tcg_temp_free(mas3);
1106     tcg_temp_free(mas7);
1107 }
1108 
1109 #endif
1110 
1111 #ifdef TARGET_PPC64
1112 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1113                                     int bit, int sprn, int cause)
1114 {
1115     TCGv_i32 t1 = tcg_const_i32(bit);
1116     TCGv_i32 t2 = tcg_const_i32(sprn);
1117     TCGv_i32 t3 = tcg_const_i32(cause);
1118 
1119     gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1120 
1121     tcg_temp_free_i32(t3);
1122     tcg_temp_free_i32(t2);
1123     tcg_temp_free_i32(t1);
1124 }
1125 
1126 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1127                                    int bit, int sprn, int cause)
1128 {
1129     TCGv_i32 t1 = tcg_const_i32(bit);
1130     TCGv_i32 t2 = tcg_const_i32(sprn);
1131     TCGv_i32 t3 = tcg_const_i32(cause);
1132 
1133     gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1134 
1135     tcg_temp_free_i32(t3);
1136     tcg_temp_free_i32(t2);
1137     tcg_temp_free_i32(t1);
1138 }
1139 
1140 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1141 {
1142     TCGv spr_up = tcg_temp_new();
1143     TCGv spr = tcg_temp_new();
1144 
1145     gen_load_spr(spr, sprn - 1);
1146     tcg_gen_shri_tl(spr_up, spr, 32);
1147     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1148 
1149     tcg_temp_free(spr);
1150     tcg_temp_free(spr_up);
1151 }
1152 
1153 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1154 {
1155     TCGv spr = tcg_temp_new();
1156 
1157     gen_load_spr(spr, sprn - 1);
1158     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1159     gen_store_spr(sprn - 1, spr);
1160 
1161     tcg_temp_free(spr);
1162 }
1163 
1164 #if !defined(CONFIG_USER_ONLY)
1165 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1166 {
1167     TCGv hmer = tcg_temp_new();
1168 
1169     gen_load_spr(hmer, sprn);
1170     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1171     gen_store_spr(sprn, hmer);
1172     spr_store_dump_spr(sprn);
1173     tcg_temp_free(hmer);
1174 }
1175 
/* LPCR write: delegated to a helper. */
void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
1180 #endif /* !defined(CONFIG_USER_ONLY) */
1181 
/* TAR accesses: guarded by the FSCR[TAR] facility check. */
void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

/* TM SPR accesses: guarded by the MSR[TM] facility check. */
void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

/* Upper-32-bit views of the previous (sprn - 1) TM SPR. */
void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
1217 
/* EBB SPR accesses: guarded by the FSCR[EBB] facility check. */
void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

/* Upper-32-bit views of the previous (sprn - 1) EBB SPR. */
void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
1241 #endif
1242 
/*
 * Convenience wrappers around the GEN_OPCODE* table-entry builders:
 * the handler is always gen_<name>; "_E" variants take an extra type2
 * flag, "2" variants a separate table name, and "_2" variants a fourth
 * opcode field.
 */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1260 
/* One entry of the opcode dispatch table. */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;  /* primary/extended opcode fields */
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;  /* invalid-bit masks, type flags and callback */
    const char *oname;      /* opcode name, used for diagnostics */
} opcode_t;
1269 
/* Raise a privileged-opcode program exception. */
static void gen_priv_opc(DisasContext *ctx)
{
    gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
}
1274 
/* Helpers for priv. check */
#define GEN_PRIV(CTX)              \
    do {                           \
        gen_priv_opc(CTX); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV(CTX) GEN_PRIV(CTX)
#define CHK_SV(CTX) GEN_PRIV(CTX)
#define CHK_HVRM(CTX) GEN_PRIV(CTX)
#else
/*
 * Use the macro argument (CTX) rather than a hard-coded 'ctx' so the
 * macros do not silently depend on the caller's local variable name.
 */
/* Hypervisor-only: fault unless HV and not problem state. */
#define CHK_HV(CTX)                                 \
    do {                                            \
        if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
            GEN_PRIV(CTX);                          \
        }                                           \
    } while (0)
/* Supervisor-only: fault in problem state (PR=1). */
#define CHK_SV(CTX)                  \
    do {                             \
        if (unlikely((CTX)->pr)) {   \
            GEN_PRIV(CTX);           \
        }                            \
    } while (0)
/* Hypervisor real mode: additionally requires ctx->dr clear. */
#define CHK_HVRM(CTX)                                         \
    do {                                                      \
        if (unlikely((CTX)->pr || !(CTX)->hv || (CTX)->dr)) { \
            GEN_PRIV(CTX);                                    \
        }                                                     \
    } while (0)
#endif

/* No privilege check */
#define CHK_NONE(CTX)
1307 
1308 /*****************************************************************************/
1309 /* PowerPC instructions table                                                */
1310 
/*
 * opcode_t table-entry builders.  opc4 is 0xff (unused) unless the
 * GEN_OPCODE3/4 forms are used; GEN_OPCODE_DUAL carries two invalid-bit
 * masks; GEN_OPCODE2/4 take an explicit table name instead of
 * stringify(name).
 */
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
1382 
/* Invalid instruction: raise the invalid-opcode program exception. */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
1388 
/* Fallback table entry: every opcode bit is "invalid"; dispatches to
 * gen_invalid. */
static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};
1396 
1397 /***                           Integer comparison                          ***/
1398 
/*
 * Compare arg0 with arg1 (signed when s != 0, unsigned otherwise) and
 * set CR field crf to LT/GT/EQ OR'ed with the current SO bit.
 */
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    /* Start from EQ, then override with LT or GT as appropriate. */
    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    /* Fold the summary-overflow bit into the CR field. */
    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}
1421 
/* Compare arg0 against the immediate arg1 (see gen_op_cmp). */
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}
1428 
1429 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1430 {
1431     TCGv t0, t1;
1432     t0 = tcg_temp_new();
1433     t1 = tcg_temp_new();
1434     if (s) {
1435         tcg_gen_ext32s_tl(t0, arg0);
1436         tcg_gen_ext32s_tl(t1, arg1);
1437     } else {
1438         tcg_gen_ext32u_tl(t0, arg0);
1439         tcg_gen_ext32u_tl(t1, arg1);
1440     }
1441     gen_op_cmp(t0, t1, s, crf);
1442     tcg_temp_free(t1);
1443     tcg_temp_free(t0);
1444 }
1445 
/* 32-bit compare of arg0 against the immediate arg1 (see gen_op_cmp32). */
static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}
1452 
1453 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1454 {
1455     if (NARROW_MODE(ctx)) {
1456         gen_op_cmpi32(reg, 0, 1, 0);
1457     } else {
1458         gen_op_cmpi(reg, 0, 1, 0);
1459     }
1460 }
1461 
/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    /* src1 = low byte of rA; src2lo/src2hi = range bounds from rB bytes 0/1 */
    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    /* in-range test: src2lo <= src1 <= src2hi */
    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        /* L=1: also test the second range held in rB bytes 2/3 */
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    /* the result lands in the GT bit of the target CR field */
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}
1499 
1500 #if defined(TARGET_PPC64)
/* cmpeqb - byte-equality compare, implemented entirely in the helper */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
1507 #endif
1508 
/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);  /* bit position within the CR field */
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    /* Extract the selected CR bit into t0 */
    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    /* rD = CR[bi] ? rA : rB, where an rA field of 0 reads as constant 0 */
    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}
1527 
/* cmpb: PowerPC 2.05 specification; per-byte compare done in the helper */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}
1534 
1535 /***                           Integer arithmetic                          ***/
1536 
/*
 * Set XER[OV] (and OV32 on ISA v3.00) from result arg0 and source
 * operands arg1/arg2; 'sub' selects subtraction overflow rules.
 * SO accumulates OV at the end.
 */
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    /* Overflow iff the result sign differs from arg2's while the
     * operand signs allow it (xor/andc trick). */
    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        /* 32-bit mode: the overflow bit is bit 31 */
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}
1563 
/*
 * Compute the ISA v3.00 CA32 bit (carry out of bit 32) into ca32 from
 * the result and operands; no-op on pre-v3.00 CPUs.  'sub' selects the
 * borrow form.
 */
static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    /* carry-out of bit 32 = bit 32 of (arg0 (x)nor arg1) ^ res */
    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}
1584 
/*
 * Common add function.
 *
 * ret = arg1 + arg2 [+ ca when add_ca].  Optionally computes CA/CA32
 * (compute_ca), OV/OV32 (compute_ov) and CR0 (compute_rc0).  The result
 * is built in a scratch t0 when flags are needed, so ret may alias an
 * input register.
 */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            /* Full-width carry via double-word add2 */
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/*
 * Add functions with two operands; 'ca' names the carry TCG global used
 * as both carry-in and carry-out (cpu_ca, or cpu_ov for addex).
 */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}
1667 
1668 /* add  add.  addo  addo. */
1669 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1670 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1671 /* addc  addc.  addco  addco. */
1672 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1673 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1674 /* adde  adde.  addeo  addeo. */
1675 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1676 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1677 /* addme  addme.  addmeo  addmeo.  */
1678 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1679 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1680 /* addex */
1681 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1682 /* addze  addze.  addzeo  addzeo.*/
1683 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1684 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addic  addic.*/
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    /* rD = rA + SIMM, always computing CA/CA32; CR0 only when compute_rc0 */
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

/* addic: no CR0 update */
static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

/* addic.: record form, updates CR0 */
static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}
1703 
/*
 * 32-bit divide: ret = arg1 / arg2 (signed when 'sign' != 0).  Divide
 * by zero and INT_MIN / -1 are detected up front (t2 != 0); the divisor
 * is then patched to a non-trapping value so the host divide is safe,
 * and t2 feeds OV/OV32 when compute_ov is set.
 */
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        /* t2 = (t0 == INT_MIN && t1 == -1) || t1 == 0 */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* replace the divisor with t2 (== 1) when overflow was detected */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        /* t2 = (t1 == 0) */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
/* Div functions: expand to gen_<name> wrappers over gen_op_arith_divw */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     sign, compute_ov);                                       \
}
/* divwu  divwu.  divwuo  divwuo.   */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw  divw.  divwo  divwo.   */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1761 
/* div[wd]eu[o][.]: extended divides, fully implemented in helpers */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);
1779 
1780 #if defined(TARGET_PPC64)
/*
 * 64-bit divide: ret = arg1 / arg2 (signed when 'sign' != 0).  Same
 * structure as gen_op_arith_divw: divide by zero and INT64_MIN / -1
 * are detected into t2, the divisor is patched to a non-trapping value,
 * and t2 feeds OV/OV32 when compute_ov is set.
 */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        /* t2 = (t0 == INT64_MIN && t1 == -1) || t1 == 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* replace the divisor with t2 (== 1) when overflow was detected */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        /* t2 = (t1 == 0) */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
1822 
/* Doubleword divide: thin wrapper around gen_op_arith_divd. */
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu  divdu.  divduo  divduo.   */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd  divd.  divdo  divdo.   */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

/* divdeu  divdeu.  divdeuo  divdeuo. */
GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
/* divde  divde.  divdeo  divdeo. */
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
1841 #endif
1842 
/*
 * Emit code for a 32-bit remainder: ret = arg1 % arg2 (modsw/moduw).
 *
 * The host rem op must never see a zero divisor (or INT_MIN % -1 for the
 * signed form), so such divisors are patched to a safe non-zero value
 * first; the architecturally undefined result is whatever the patched
 * remainder yields.  The mod instructions update no CR/XER flags.
 */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        /* t2 = (arg1 == INT_MIN && arg2 == -1) || arg2 == 0 */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* Invalid case: replace the divisor with t2 (== 1). */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        /* A zero divisor is replaced by 1. */
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}
1877 
/* Word modulo: thin wrapper around gen_op_arith_modw. */
#define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
static void glue(gen_, name)(DisasContext *ctx)                             \
{                                                                           \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
                      sign);                                                \
}

/* moduw  modsw */
GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1888 
1889 #if defined(TARGET_PPC64)
/*
 * Emit code for a 64-bit remainder: ret = arg1 % arg2 (modsd/modud).
 * Same divisor-patching scheme as gen_op_arith_modw: invalid divisors
 * (zero, or -1 with INT64_MIN dividend for the signed form) are replaced
 * before the host rem so it cannot trap.  No CR/XER flags are updated.
 */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        /* t2 = (arg1 == INT64_MIN && arg2 == -1) || arg2 == 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* Invalid case: replace the divisor with t2 (== 1). */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        /* A zero divisor is replaced by 1. */
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
1922 
1923 #define GEN_INT_ARITH_MODD(name, opc3, sign)                            \
1924 static void glue(gen_, name)(DisasContext *ctx)                           \
1925 {                                                                         \
1926   gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1927                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1928                     sign);                                                \
1929 }
1930 
1931 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1932 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1933 #endif
1934 
1935 /* mulhw  mulhw. */
1936 static void gen_mulhw(DisasContext *ctx)
1937 {
1938     TCGv_i32 t0 = tcg_temp_new_i32();
1939     TCGv_i32 t1 = tcg_temp_new_i32();
1940 
1941     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1942     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1943     tcg_gen_muls2_i32(t0, t1, t0, t1);
1944     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1945     tcg_temp_free_i32(t0);
1946     tcg_temp_free_i32(t1);
1947     if (unlikely(Rc(ctx->opcode) != 0)) {
1948         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1949     }
1950 }
1951 
1952 /* mulhwu  mulhwu.  */
1953 static void gen_mulhwu(DisasContext *ctx)
1954 {
1955     TCGv_i32 t0 = tcg_temp_new_i32();
1956     TCGv_i32 t1 = tcg_temp_new_i32();
1957 
1958     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1959     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1960     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1961     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1962     tcg_temp_free_i32(t0);
1963     tcg_temp_free_i32(t1);
1964     if (unlikely(Rc(ctx->opcode) != 0)) {
1965         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1966     }
1967 }
1968 
1969 /* mullw  mullw. */
1970 static void gen_mullw(DisasContext *ctx)
1971 {
1972 #if defined(TARGET_PPC64)
1973     TCGv_i64 t0, t1;
1974     t0 = tcg_temp_new_i64();
1975     t1 = tcg_temp_new_i64();
1976     tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1977     tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1978     tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1979     tcg_temp_free(t0);
1980     tcg_temp_free(t1);
1981 #else
1982     tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1983                     cpu_gpr[rB(ctx->opcode)]);
1984 #endif
1985     if (unlikely(Rc(ctx->opcode) != 0)) {
1986         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1987     }
1988 }
1989 
/* mullwo  mullwo. */
/*
 * mullw with OV: 32x32 signed multiply.  rD receives the product (the
 * full 64 bits on ppc64 via concat, the low word on ppc32); OV is set
 * when the product does not fit in 32 signed bits.
 */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    /* Overflow iff the high half is not the sign extension of the low. */
    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
2019 
/* mulli */
/* rD = low bits of rA * SIMM; never sets CR0 or OV. */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}
2026 
2027 #if defined(TARGET_PPC64)
2028 /* mulhd  mulhd. */
2029 static void gen_mulhd(DisasContext *ctx)
2030 {
2031     TCGv lo = tcg_temp_new();
2032     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2033                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2034     tcg_temp_free(lo);
2035     if (unlikely(Rc(ctx->opcode) != 0)) {
2036         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2037     }
2038 }
2039 
2040 /* mulhdu  mulhdu. */
2041 static void gen_mulhdu(DisasContext *ctx)
2042 {
2043     TCGv lo = tcg_temp_new();
2044     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2045                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2046     tcg_temp_free(lo);
2047     if (unlikely(Rc(ctx->opcode) != 0)) {
2048         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2049     }
2050 }
2051 
2052 /* mulld  mulld. */
2053 static void gen_mulld(DisasContext *ctx)
2054 {
2055     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2056                    cpu_gpr[rB(ctx->opcode)]);
2057     if (unlikely(Rc(ctx->opcode) != 0)) {
2058         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2059     }
2060 }
2061 
/* mulldo  mulldo. */
/*
 * mulld with OV: rD = low 64 bits of the product; OV is set when the
 * signed 128-bit product does not fit in 64 bits (high half differs
 * from the sign extension of the low half).
 */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
2086 #endif
2087 
/* Common subf function */
/*
 * Emit ret = ~arg1 + arg2 [+ CA], i.e. the arg2 - arg1 family.
 *
 * add_ca:      consume XER[CA] as carry-in (subfe/subfme/subfze forms).
 * compute_ca:  produce XER[CA] (and CA32 on ISA 3.00).
 * compute_ov:  produce XER[OV] and accumulate XER[SO].
 * compute_rc0: update CR0 from the result (Rc bit set).
 */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    /* Work in a temp when flags are needed, in case ret aliases an input. */
    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changes w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            /* Two-step add via add2 so the carry-out lands in cpu_ca. */
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            /* No carry-in: CA is simply (arg2 >= arg1) unsigned. */
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
        }
    } else if (add_ca) {
        /*
         * Since we're ignoring carry-out, we can simplify the
         * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
         */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with two register operands */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Sub functions with one operand and one immediate */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf  subf.  subfo  subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc  subfc.  subfco  subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe  subfe.  subfeo  subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme  subfme.  subfmeo  subfmeo.  */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze  subfze.  subfzeo  subfzeo. */
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2196 
2197 /* subfic */
2198 static void gen_subfic(DisasContext *ctx)
2199 {
2200     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2201     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2202                       c, 0, 1, 0, 0);
2203     tcg_temp_free(c);
2204 }
2205 
2206 /* neg neg. nego nego. */
2207 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2208 {
2209     TCGv zero = tcg_const_tl(0);
2210     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2211                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2212     tcg_temp_free(zero);
2213 }
2214 
2215 static void gen_neg(DisasContext *ctx)
2216 {
2217     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2218     if (unlikely(Rc(ctx->opcode))) {
2219         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2220     }
2221 }
2222 
/* nego nego.: negate with OV detection, via the subf-based path. */
static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}
2227 
2228 /***                            Integer logical                            ***/
2229 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2230 static void glue(gen_, name)(DisasContext *ctx)                               \
2231 {                                                                             \
2232     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2233        cpu_gpr[rB(ctx->opcode)]);                                             \
2234     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2235         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2236 }
2237 
2238 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2239 static void glue(gen_, name)(DisasContext *ctx)                               \
2240 {                                                                             \
2241     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2242     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2243         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2244 }
2245 
2246 /* and & and. */
2247 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2248 /* andc & andc. */
2249 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2250 
2251 /* andi. */
2252 static void gen_andi_(DisasContext *ctx)
2253 {
2254     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2255                     UIMM(ctx->opcode));
2256     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2257 }
2258 
2259 /* andis. */
2260 static void gen_andis_(DisasContext *ctx)
2261 {
2262     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2263                     UIMM(ctx->opcode) << 16);
2264     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2265 }
2266 
2267 /* cntlzw */
2268 static void gen_cntlzw(DisasContext *ctx)
2269 {
2270     TCGv_i32 t = tcg_temp_new_i32();
2271 
2272     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2273     tcg_gen_clzi_i32(t, t, 32);
2274     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2275     tcg_temp_free_i32(t);
2276 
2277     if (unlikely(Rc(ctx->opcode) != 0)) {
2278         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2279     }
2280 }
2281 
2282 /* cnttzw */
2283 static void gen_cnttzw(DisasContext *ctx)
2284 {
2285     TCGv_i32 t = tcg_temp_new_i32();
2286 
2287     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2288     tcg_gen_ctzi_i32(t, t, 32);
2289     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2290     tcg_temp_free_i32(t);
2291 
2292     if (unlikely(Rc(ctx->opcode) != 0)) {
2293         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2294     }
2295 }
2296 
/* eqv & eqv. : rA = ~(rS ^ rB) */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. : sign-extend byte */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. : sign-extend halfword */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2307 
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/*
 * Used by the priority-nop forms of "or": store 0 to cpu->halted (the
 * negative offsetof walks from env back to the enclosing CPUState) and
 * leave the TB with EXCP_HLT so the main loop reschedules.
 */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2320 
/* or & or. */
/*
 * Also implements mr (rB == rS) and, on 64-bit CPUs, the "or rN,rN,rN"
 * priority-hint nops that program PPR[PRI] (bit field at shift 50).
 */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb) {
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        } else {
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        }
        if (unlikely(Rc(ctx->opcode) != 0)) {
            gen_set_Rc0(ctx, cpu_gpr[ra]);
        }
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        /* Map the register number encoding to a PPR priority value. */
        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /*
         * Pause out of TCG otherwise spin loops with smt_low eat too
         * much CPU and the kernel hangs.  This applies to all
         * encodings other than no-op, e.g., miso(rs=26), yield(27),
         * mdoio(29), mdoom(30), and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. : rA = rS | ~rB */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2409 
2410 /* xor & xor. */
2411 static void gen_xor(DisasContext *ctx)
2412 {
2413     /* Optimisation for "set to zero" case */
2414     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2415         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2416                        cpu_gpr[rB(ctx->opcode)]);
2417     } else {
2418         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2419     }
2420     if (unlikely(Rc(ctx->opcode) != 0)) {
2421         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2422     }
2423 }
2424 
2425 /* ori */
2426 static void gen_ori(DisasContext *ctx)
2427 {
2428     target_ulong uimm = UIMM(ctx->opcode);
2429 
2430     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2431         return;
2432     }
2433     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2434 }
2435 
2436 /* oris */
2437 static void gen_oris(DisasContext *ctx)
2438 {
2439     target_ulong uimm = UIMM(ctx->opcode);
2440 
2441     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2442         /* NOP */
2443         return;
2444     }
2445     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2446                    uimm << 16);
2447 }
2448 
2449 /* xori */
2450 static void gen_xori(DisasContext *ctx)
2451 {
2452     target_ulong uimm = UIMM(ctx->opcode);
2453 
2454     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2455         /* NOP */
2456         return;
2457     }
2458     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2459 }
2460 
2461 /* xoris */
2462 static void gen_xoris(DisasContext *ctx)
2463 {
2464     target_ulong uimm = UIMM(ctx->opcode);
2465 
2466     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2467         /* NOP */
2468         return;
2469     }
2470     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2471                     uimm << 16);
2472 }
2473 
/* popcntb : PowerPC 2.03 specification */
/* Per-byte population count; the per-byte split is done in the helper. */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
2479 
/* popcntw: population count per 32-bit word of the register. */
static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    /* Two words per register: the helper counts each word separately. */
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    /* Single word: a plain population count suffices. */
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}
2488 
#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
/* Population count of the full 64-bit register. */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif
2496 
2497 /* prtyw: PowerPC 2.05 specification */
2498 static void gen_prtyw(DisasContext *ctx)
2499 {
2500     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2501     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2502     TCGv t0 = tcg_temp_new();
2503     tcg_gen_shri_tl(t0, rs, 16);
2504     tcg_gen_xor_tl(ra, rs, t0);
2505     tcg_gen_shri_tl(t0, ra, 8);
2506     tcg_gen_xor_tl(ra, ra, t0);
2507     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2508     tcg_temp_free(t0);
2509 }
2510 
2511 #if defined(TARGET_PPC64)
2512 /* prtyd: PowerPC 2.05 specification */
2513 static void gen_prtyd(DisasContext *ctx)
2514 {
2515     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2516     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2517     TCGv t0 = tcg_temp_new();
2518     tcg_gen_shri_tl(t0, rs, 32);
2519     tcg_gen_xor_tl(ra, rs, t0);
2520     tcg_gen_shri_tl(t0, ra, 16);
2521     tcg_gen_xor_tl(ra, ra, t0);
2522     tcg_gen_shri_tl(t0, ra, 8);
2523     tcg_gen_xor_tl(ra, ra, t0);
2524     tcg_gen_andi_tl(ra, ra, 1);
2525     tcg_temp_free(t0);
2526 }
2527 #endif
2528 
#if defined(TARGET_PPC64)
/* bpermd */
/* Bit permute doubleword: the bit gathering is done in a helper. */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif
2537 
#if defined(TARGET_PPC64)
/* extsw & extsw. : sign-extend word to doubleword */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2541 
2542 /* cntlzd */
2543 static void gen_cntlzd(DisasContext *ctx)
2544 {
2545     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2546     if (unlikely(Rc(ctx->opcode) != 0)) {
2547         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2548     }
2549 }
2550 
2551 /* cnttzd */
2552 static void gen_cnttzd(DisasContext *ctx)
2553 {
2554     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2555     if (unlikely(Rc(ctx->opcode) != 0)) {
2556         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2557     }
2558 }
2559 
/* darn */
/*
 * Deliver A Random Number (ISA 3.0).  L = 0 requests a 32-bit value,
 * L = 1 or 2 a 64-bit one (same helper for both); reserved L > 2
 * returns all-ones, the architected error value.  gen_icount_io_start
 * is needed because the helpers are non-deterministic (matters for
 * icount/record-replay).
 */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l > 2) {
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    } else {
        gen_icount_io_start(ctx);
        if (l == 0) {
            gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
        } else {
            /* Return 64-bit random for both CRN and RRN */
            gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
        }
    }
}
2577 #endif
2578 
2579 /***                             Integer rotate                            ***/
2580 
/* rlwimi & rlwimi. */
/*
 * Rotate Left Word Immediate then Mask Insert:
 * rA = (rotl32(rS, SH) & MASK(MB, ME)) | (rA & ~MASK(MB, ME)).
 * When the rotated field lands exactly under the mask this is a single
 * deposit; otherwise a generic rotate-and-merge is emitted.
 */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31 - me) && mb <= me) {
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        bool mask_in_32b = true;
        TCGv t1;

#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

#if defined(TARGET_PPC64)
        if (mask > 0xffffffffu) {
            mask_in_32b = false;
        }
#endif
        t1 = tcg_temp_new();
        if (mask_in_32b) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* Replicate the low word so a 64-bit rotate acts as 32-bit. */
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2633 
/* rlwinm & rlwinm. */
/*
 * Rotate Left Word Immediate then AND with Mask:
 * rA = rotl32(rS, SH) & MASK(MB, ME).
 * Shift+mask combinations that reduce to a single deposit or extract
 * are special-cased ahead of the generic rotate-and-mask path.
 */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
        bool mask_in_32b = true;
#if defined(TARGET_PPC64)
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
#if defined(TARGET_PPC64)
        if (mask > 0xffffffffu) {
            mask_in_32b = false;
        }
#endif
        if (mask_in_32b) {
            if (sh == 0) {
                tcg_gen_andi_tl(t_ra, t_rs, mask);
            } else {
                TCGv_i32 t0 = tcg_temp_new_i32();
                tcg_gen_trunc_tl_i32(t0, t_rs);
                tcg_gen_rotli_i32(t0, t0, sh);
                tcg_gen_andi_i32(t0, t0, mask);
                tcg_gen_extu_i32_tl(t_ra, t0);
                tcg_temp_free_i32(t0);
            }
        } else {
#if defined(TARGET_PPC64)
            /* Replicate the low word so a 64-bit rotate acts as 32-bit. */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2687 
/* rlwnm & rlwnm. - rotate left word by register amount then AND with mask */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;
    bool mask_in_32b = true;

#if defined(TARGET_PPC64)
    /* MASK() takes 64-bit bit numbers; the word mask occupies the low
       32 bits. */
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

#if defined(TARGET_PPC64)
    /* A wrapping mask (mb > me) has bits above the low word and needs
       the replicated-word 64-bit rotate. */
    if (mask > 0xffffffffu) {
        mask_in_32b = false;
    }
#endif
    if (mask_in_32b) {
        /* Rotate the low 32 bits; only the low 5 bits of rB select the
           amount. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        /* Duplicate the low word into the high half so the 64-bit
           rotate behaves like a 32-bit one for wrapping masks. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2738 
2739 #if defined(TARGET_PPC64)
/*
 * Expand the one-bit (R2) or two-bit (R4) split-opcode variants of a
 * 64-bit rotate instruction into separate handlers that pass the extra
 * opcode bits as constant arguments to the shared gen_<name> worker.
 */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
2770 
/*
 * Common worker for the rldic* immediate rotates: rotate rS left by sh,
 * AND with MASK(mb, me), optionally set CR0.  Recognizes the
 * shift-into-zeros and right-aligned-extract special cases.
 */
static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;      /* mask length when mb <= me */
    int rsh = (64 - sh) & 63;   /* equivalent right-rotate amount */

    if (sh != 0 && len > 0 && me == (63 - sh)) {
        /* Mask ends where the rotate shifts to: deposit into zeros. */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 63 && rsh + len <= 64) {
        /* Right-aligned mask: zero-extending bitfield extract. */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        tcg_gen_rotli_tl(t_ra, t_rs, sh);
        tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2790 
/* rldicl - rldicl. - rotate left then clear left (mask mb..63) */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    /* sh and mb are 6-bit fields whose top bit is encoded separately. */
    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2801 
/* rldicr - rldicr. - rotate left then clear right (mask 0..me) */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    /* The MD-form me field occupies the same opcode bits as mb, so
       MB() extracts it. */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2812 
/* rldic - rldic. - rotate left then clear (mask mb..63-sh) */
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);
2823 
/*
 * Common worker for the rldc* register rotates: rotate rS left by the
 * low 6 bits of rB, AND with MASK(mb, me), optionally set CR0.
 */
static void gen_rldnm(DisasContext *ctx, int mb, int me)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    TCGv t0;

    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, t_rb, 0x3f);
    tcg_gen_rotl_tl(t_ra, t_rs, t0);
    tcg_temp_free(t0);

    tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2841 
/* rldcl - rldcl. - rotate left by register then clear left */
static inline void gen_rldcl(DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2851 
/* rldcr - rldcr. - rotate left by register then clear right */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    /* The MDS-form me field shares its bit positions with mb, hence
       MB(). */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2861 
/*
 * rldimi - rldimi. - rotate left immediate then insert: the rotated rS
 * replaces the bits of rA selected by MASK(mb, 63 - sh).
 */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;

    if (mb <= me) {
        /* Contiguous mask: a single deposit at offset sh. */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        /* Wrapping mask: rotate, mask, and merge by hand. */
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2888 #endif
2889 
2890 /***                             Integer shift                             ***/
2891 
/* slw & slw. - shift left word; result is 0 when rB & 0x20 is set */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* Move rB bit 5 (0x20) into the sign position, then replicate it
       with an arithmetic shift: all-ones iff the count >= 32. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    /* Keep only the low word of the result. */
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
2917 
2918 /* sraw & sraw. */
2919 static void gen_sraw(DisasContext *ctx)
2920 {
2921     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2922                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2923     if (unlikely(Rc(ctx->opcode) != 0)) {
2924         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2925     }
2926 }
2927 
/* srawi & srawi. - shift right algebraic word immediate */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* No shift: just sign-extend the word; CA (and CA32 on ISA
           v3.00) is always clear. */
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        /* CA is set iff the (sign-extended) source is negative and any
           1-bits are shifted out. */
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
2958 
/* srw & srw. - shift right word; result is 0 when rB & 0x20 is set */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* Replicate rB bit 5 (0x20) through an arithmetic shift: the mask
       is all-ones iff the count >= 32. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* Logical shift: operate on the zero-extended low word. */
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
2984 
2985 #if defined(TARGET_PPC64)
/* sld & sld. - shift left doubleword; result is 0 when rB & 0x40 is set */
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    /* Move rB bit 6 (0x40) into the sign position and replicate it. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
3005 
3006 /* srad & srad. */
3007 static void gen_srad(DisasContext *ctx)
3008 {
3009     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3010                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3011     if (unlikely(Rc(ctx->opcode) != 0)) {
3012         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3013     }
3014 }
/* sradi & sradi. - shift right algebraic doubleword immediate */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    /* n supplies the high bit of the split 6-bit shift amount. */
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* No shift: copy; CA (and CA32 on ISA v3.00) is always clear. */
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        /* CA is set iff the source is negative and any 1-bits are
           shifted out. */
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
3044 
/* sradi with sh[5] = 0 */
static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

/* sradi with sh[5] = 1 */
static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}
3054 
/* extswsli & extswsli. - extend sign word then shift left immediate */
static inline void gen_extswsli(DisasContext *ctx, int n)
{
    /* n supplies the high bit of the split 6-bit shift amount. */
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ext32s_tl(dst, src);
    tcg_gen_shli_tl(dst, dst, sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
3068 
/* extswsli with sh[5] = 0 */
static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}

/* extswsli with sh[5] = 1 */
static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}
3078 
/* srd & srd. - shift right doubleword; result is 0 when rB & 0x40 is set */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    /* Move rB bit 6 (0x40) into the sign position and replicate it. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
3098 #endif
3099 
3100 /***                           Addressing modes                            ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    /* maskl selects low displacement bits to discard (callers pass the
       bits an instruction form reuses for other purposes). */
    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            /* 32-bit mode: effective addresses are zero-extended words. */
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        /* Zero displacement: avoid the add entirely. */
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}
3126 
3127 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3128 {
3129     if (rA(ctx->opcode) == 0) {
3130         if (NARROW_MODE(ctx)) {
3131             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3132         } else {
3133             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3134         }
3135     } else {
3136         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3137         if (NARROW_MODE(ctx)) {
3138             tcg_gen_ext32u_tl(EA, EA);
3139         }
3140     }
3141 }
3142 
3143 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3144 {
3145     if (rA(ctx->opcode) == 0) {
3146         tcg_gen_movi_tl(EA, 0);
3147     } else if (NARROW_MODE(ctx)) {
3148         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3149     } else {
3150         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3151     }
3152 }
3153 
/* ret = arg1 + val, truncated to 32 bits in narrow (32-bit) mode. */
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}
3162 
/*
 * Raise an alignment interrupt flagging the little-endian cause; bits
 * 16-25 of the opcode (the register fields) are passed along in the
 * error code.
 */
static inline void gen_align_no_le(DisasContext *ctx)
{
    gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
                      (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
}
3168 
3169 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3170 {
3171     TCGv ea = tcg_temp_new();
3172     if (ra) {
3173         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3174     } else {
3175         tcg_gen_mov_tl(ea, displ);
3176     }
3177     if (NARROW_MODE(ctx)) {
3178         tcg_gen_ext32u_tl(ea, ea);
3179     }
3180     return ea;
3181 }
3182 
3183 /***                             Integer load                              ***/
/* Memop with the guest's default endianness applied. */
#define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
/* Memop with the opposite of the guest's default endianness. */
#define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))

/* Emit a target-long guest load with the given memop. */
#define GEN_QEMU_LOAD_TL(ldop, op)                                      \
static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))

/* Byte-reversed load variants. */
GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))

/* Emit a 64-bit guest load with the given memop. */
#define GEN_QEMU_LOAD_64(ldop, op)                                  \
static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
                                             TCGv_i64 val,          \
                                             TCGv addr)             \
{                                                                   \
    tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
}

GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
#endif

/* Emit a target-long guest store with the given memop. */
#define GEN_QEMU_STORE_TL(stop, op)                                     \
static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
}

#if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
#endif
GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))

/* Byte-reversed store variants. */
GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))

/* Emit a 64-bit guest store with the given memop. */
#define GEN_QEMU_STORE_64(stop, op)                               \
static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
                                              TCGv_i64 val,       \
                                              TCGv addr)          \
{                                                                 \
    tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
}

GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
#endif
3255 
/*
 * Define a gen_<name>x handler: indexed load of rD from (rA|0)+rB,
 * gated by the given permission check.
 */
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    chk(ctx);                                                                 \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

#define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* Indexed load restricted to hypervisor real mode. */
#define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

/* Indexed load using the EPID (external PID) mem_idx; supervisor only. */
#define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
static void glue(gen_, name##epx)(DisasContext *ctx)                          \
{                                                                             \
    TCGv EA;                                                                  \
    CHK_SV(ctx);                                                              \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
    tcg_temp_free(EA);                                                        \
}

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
#endif

#if defined(TARGET_PPC64)
/* CI (cache-inhibited) load/store variants */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
3300 
3301 /***                              Integer store                            ***/
3302 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3303 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3304 {                                                                             \
3305     TCGv EA;                                                                  \
3306     chk(ctx);                                                                 \
3307     gen_set_access_type(ctx, ACCESS_INT);                                     \
3308     EA = tcg_temp_new();                                                      \
3309     gen_addr_reg_index(ctx, EA);                                              \
3310     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3311     tcg_temp_free(EA);                                                        \
3312 }
3313 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3314     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3315 
3316 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3317     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3318 
3319 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3320 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3321 {                                                                             \
3322     TCGv EA;                                                                  \
3323     CHK_SV(ctx);                                                              \
3324     gen_set_access_type(ctx, ACCESS_INT);                                     \
3325     EA = tcg_temp_new();                                                      \
3326     gen_addr_reg_index(ctx, EA);                                              \
3327     tcg_gen_qemu_st_tl(                                                       \
3328         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3329     tcg_temp_free(EA);                                                        \
3330 }
3331 
3332 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3333 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3334 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3335 #if defined(TARGET_PPC64)
3336 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3337 #endif
3338 
3339 #if defined(TARGET_PPC64)
3340 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3341 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3342 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3343 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3344 #endif
/***                Integer load and store with byte reverse               ***/

/* lhbrx - load halfword byte-reverse indexed */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx - load word byte-reverse indexed */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx - load doubleword byte-reverse indexed */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx - store doubleword byte-reverse indexed */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif  /* TARGET_PPC64 */

/* sthbrx - store halfword byte-reverse indexed */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx - store word byte-reverse indexed */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3364 
3365 /***                    Integer load and store multiple                    ***/
3366 
3367 /* lmw */
3368 static void gen_lmw(DisasContext *ctx)
3369 {
3370     TCGv t0;
3371     TCGv_i32 t1;
3372 
3373     if (ctx->le_mode) {
3374         gen_align_no_le(ctx);
3375         return;
3376     }
3377     gen_set_access_type(ctx, ACCESS_INT);
3378     t0 = tcg_temp_new();
3379     t1 = tcg_const_i32(rD(ctx->opcode));
3380     gen_addr_imm_index(ctx, t0, 0);
3381     gen_helper_lmw(cpu_env, t0, t1);
3382     tcg_temp_free(t0);
3383     tcg_temp_free_i32(t1);
3384 }
3385 
3386 /* stmw */
3387 static void gen_stmw(DisasContext *ctx)
3388 {
3389     TCGv t0;
3390     TCGv_i32 t1;
3391 
3392     if (ctx->le_mode) {
3393         gen_align_no_le(ctx);
3394         return;
3395     }
3396     gen_set_access_type(ctx, ACCESS_INT);
3397     t0 = tcg_temp_new();
3398     t1 = tcg_const_i32(rS(ctx->opcode));
3399     gen_addr_imm_index(ctx, t0, 0);
3400     gen_helper_stmw(cpu_env, t0, t1);
3401     tcg_temp_free(t0);
3402     tcg_temp_free_i32(t1);
3403 }
3404 
3405 /***                    Integer load and store strings                     ***/
3406 
3407 /* lswi */
3408 /*
3409  * PowerPC32 specification says we must generate an exception if rA is
3410  * in the range of registers to be loaded.  In an other hand, IBM says
3411  * this is valid, but rA won't be loaded.  For now, I'll follow the
3412  * spec...
3413  */
3414 static void gen_lswi(DisasContext *ctx)
3415 {
3416     TCGv t0;
3417     TCGv_i32 t1, t2;
3418     int nb = NB(ctx->opcode);
3419     int start = rD(ctx->opcode);
3420     int ra = rA(ctx->opcode);
3421     int nr;
3422 
3423     if (ctx->le_mode) {
3424         gen_align_no_le(ctx);
3425         return;
3426     }
3427     if (nb == 0) {
3428         nb = 32;
3429     }
3430     nr = DIV_ROUND_UP(nb, 4);
3431     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3432         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3433         return;
3434     }
3435     gen_set_access_type(ctx, ACCESS_INT);
3436     t0 = tcg_temp_new();
3437     gen_addr_register(ctx, t0);
3438     t1 = tcg_const_i32(nb);
3439     t2 = tcg_const_i32(start);
3440     gen_helper_lsw(cpu_env, t0, t1, t2);
3441     tcg_temp_free(t0);
3442     tcg_temp_free_i32(t1);
3443     tcg_temp_free_i32(t2);
3444 }
3445 
3446 /* lswx */
3447 static void gen_lswx(DisasContext *ctx)
3448 {
3449     TCGv t0;
3450     TCGv_i32 t1, t2, t3;
3451 
3452     if (ctx->le_mode) {
3453         gen_align_no_le(ctx);
3454         return;
3455     }
3456     gen_set_access_type(ctx, ACCESS_INT);
3457     t0 = tcg_temp_new();
3458     gen_addr_reg_index(ctx, t0);
3459     t1 = tcg_const_i32(rD(ctx->opcode));
3460     t2 = tcg_const_i32(rA(ctx->opcode));
3461     t3 = tcg_const_i32(rB(ctx->opcode));
3462     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3463     tcg_temp_free(t0);
3464     tcg_temp_free_i32(t1);
3465     tcg_temp_free_i32(t2);
3466     tcg_temp_free_i32(t3);
3467 }
3468 
3469 /* stswi */
3470 static void gen_stswi(DisasContext *ctx)
3471 {
3472     TCGv t0;
3473     TCGv_i32 t1, t2;
3474     int nb = NB(ctx->opcode);
3475 
3476     if (ctx->le_mode) {
3477         gen_align_no_le(ctx);
3478         return;
3479     }
3480     gen_set_access_type(ctx, ACCESS_INT);
3481     t0 = tcg_temp_new();
3482     gen_addr_register(ctx, t0);
3483     if (nb == 0) {
3484         nb = 32;
3485     }
3486     t1 = tcg_const_i32(nb);
3487     t2 = tcg_const_i32(rS(ctx->opcode));
3488     gen_helper_stsw(cpu_env, t0, t1, t2);
3489     tcg_temp_free(t0);
3490     tcg_temp_free_i32(t1);
3491     tcg_temp_free_i32(t2);
3492 }
3493 
3494 /* stswx */
3495 static void gen_stswx(DisasContext *ctx)
3496 {
3497     TCGv t0;
3498     TCGv_i32 t1, t2;
3499 
3500     if (ctx->le_mode) {
3501         gen_align_no_le(ctx);
3502         return;
3503     }
3504     gen_set_access_type(ctx, ACCESS_INT);
3505     t0 = tcg_temp_new();
3506     gen_addr_reg_index(ctx, t0);
3507     t1 = tcg_temp_new_i32();
3508     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3509     tcg_gen_andi_i32(t1, t1, 0x7F);
3510     t2 = tcg_const_i32(rS(ctx->opcode));
3511     gen_helper_stsw(cpu_env, t0, t1, t2);
3512     tcg_temp_free(t0);
3513     tcg_temp_free_i32(t1);
3514     tcg_temp_free_i32(t2);
3515 }
3516 
3517 /***                        Memory synchronisation                         ***/
/* eieio - enforce in-order execution of I/O */
static void gen_eieio(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_ALL;

    /*
     * eieio has complex semantics. It provides memory ordering between
     * operations in the set:
     * - loads from CI memory.
     * - stores to CI memory.
     * - stores to WT memory.
     *
     * It separately also orders memory for operations in the set:
     * - stores to cacheable memory.
     *
     * It also serializes instructions:
     * - dcbt and dcbst.
     *
     * It separately serializes:
     * - tlbie and tlbsync.
     *
     * And separately serializes:
     * - slbieg, slbiag, and slbsync.
     *
     * The end result is that CI memory ordering requires TCG_MO_ALL
     * and it is not possible to special-case more relaxed ordering for
     * cacheable accesses. TCG_BAR_SC is required to provide this
     * serialization.
     */

    /*
     * POWER9 has a eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * ISA says that "Reserved fields in instructions are ignored
         * by the processor". So ignore the bit 6 on non-POWER9 CPU but
         * as this is not an instruction software should be using,
         * complain to the user.
         */
        if (!(ctx->insns_flags2 & PPC2_ISA300)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
                          TARGET_FMT_lx "\n", ctx->cia);
        } else {
            /* Store-forwarding barrier: only order stores before loads. */
            bar = TCG_MO_ST_LD;
        }
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}
3569 
#if !defined(CONFIG_USER_ONLY)
/*
 * Emit code that invokes the TLB-flush helper when a flush has been
 * posted in env->tlb_need_flush.  "global" selects the helper variant
 * that also handles the global (broadcast) flush case.  Only emitted
 * for CPU models with lazy_tlb_flush set.
 */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
{
    TCGv_i32 t;
    TCGLabel *l;

    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    /* Skip the helper call entirely when no flush is pending. */
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    if (global) {
        gen_helper_check_tlb_flush_global(cpu_env);
    } else {
        gen_helper_check_tlb_flush_local(cpu_env);
    }
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
/* User mode does not model a TLB; nothing to flush. */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
#endif
3594 
/* isync */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush. This can only happen in
     * kernel mode however so check MSR_PR
     */
    if (!ctx->pr) {
        gen_check_tlb_flush(ctx, false);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
    /* isync is context synchronizing: end the TB and re-fetch state. */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
3608 
/* Size in bytes of the access described by a MemOp. */
#define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
3610 
3611 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3612 {
3613     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3614     TCGv t0 = tcg_temp_new();
3615 
3616     gen_set_access_type(ctx, ACCESS_RES);
3617     gen_addr_reg_index(ctx, t0);
3618     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3619     tcg_gen_mov_tl(cpu_reserve, t0);
3620     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3621     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3622     tcg_temp_free(t0);
3623 }
3624 
/* Define a gen_* handler emitting a load-and-reserve of the given size. */
#define LARX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_load_locked(ctx, memop);           \
}

/* lwarx */
LARX(lbarx, DEF_MEMOP(MO_UB))
LARX(lharx, DEF_MEMOP(MO_UW))
LARX(lwarx, DEF_MEMOP(MO_UL))
3635 
3636 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3637                                       TCGv EA, TCGCond cond, int addend)
3638 {
3639     TCGv t = tcg_temp_new();
3640     TCGv t2 = tcg_temp_new();
3641     TCGv u = tcg_temp_new();
3642 
3643     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3644     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3645     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3646     tcg_gen_addi_tl(u, t, addend);
3647 
3648     /* E.g. for fetch and increment bounded... */
3649     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3650     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3651     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3652 
3653     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3654     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3655     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3656 
3657     tcg_temp_free(t);
3658     tcg_temp_free(t2);
3659     tcg_temp_free(u);
3660 }
3661 
/*
 * Body of the lwat/ldat atomic-load instructions: the FC field selects
 * the operation; RT receives the old memory value, RT+1 (and RT+2 for
 * compare-and-swap-not-equal) supply operands.
 */
static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    int rt = rD(ctx->opcode);
    bool need_serial;
    TCGv src, dst;

    gen_addr_register(ctx, EA);
    dst = cpu_gpr[rt];
    src = cpu_gpr[(rt + 1) & 31];

    need_serial = false;
    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* Fetch and add */
        tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* Fetch and xor */
        tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Fetch and or */
        tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* Fetch and 'and' */
        tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 4:  /* Fetch and max unsigned */
        tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 5:  /* Fetch and max signed */
        tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 6:  /* Fetch and min unsigned */
        tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 7:  /* Fetch and min signed */
        tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 8: /* Swap */
        tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
        break;

    case 16: /* Compare and swap not equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Non-atomic load/movcond/store sequence; must serialize. */
            need_serial = true;
        } else {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
            /* For a 32-bit access compare only the low 32 bits of RT+1. */
            if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
                tcg_gen_mov_tl(t1, src);
            } else {
                tcg_gen_ext32u_tl(t1, src);
            }
            /* Store RT+2 when old != RT+1, else rewrite the old value. */
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
                               cpu_gpr[(rt + 2) & 31], t0);
            tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
            tcg_gen_mov_tl(dst, t0);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;

    case 24: /* Fetch and increment bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
        }
        break;
    case 25: /* Fetch and increment equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
        }
        break;
    case 28: /* Fetch and decrement bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
        }
        break;

    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(EA);

    if (need_serial) {
        /* Restart with exclusive lock.  */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
    }
}
3762 
/* lwat: word-sized atomic load-and-operate. */
static void gen_lwat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
}
3767 
#ifdef TARGET_PPC64
/* ldat: doubleword-sized atomic load-and-operate. */
static void gen_ldat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif
3774 
/*
 * Body of the stwat/stdat atomic-store instructions: the FC field
 * selects the operation; the value fetched back by the RMW op is
 * discarded.
 */
static void gen_st_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    TCGv src, discard;

    gen_addr_register(ctx, EA);
    src = cpu_gpr[rD(ctx->opcode)];
    discard = tcg_temp_new();

    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* add and Store */
        tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* xor and Store */
        tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Or and Store */
        tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* 'and' and Store */
        tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 4:  /* Store max unsigned */
        tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 5:  /* Store max signed */
        tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 6:  /* Store min unsigned */
        tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 7:  /* Store min signed */
        tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 24: /* Store twin  */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        } else {
            TCGv t = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv s = tcg_temp_new();
            TCGv s2 = tcg_temp_new();
            TCGv ea_plus_s = tcg_temp_new();

            /* Write src to both adjacent words only if they are equal. */
            tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
            tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
            tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
            tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
            tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
            tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
            tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);

            tcg_temp_free(ea_plus_s);
            tcg_temp_free(s2);
            tcg_temp_free(s);
            tcg_temp_free(t2);
            tcg_temp_free(t);
        }
        break;
    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(discard);
    tcg_temp_free(EA);
}
3845 
/* stwat: word-sized atomic store-and-operate. */
static void gen_stwat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
}
3850 
#ifdef TARGET_PPC64
/* stdat: doubleword-sized atomic store-and-operate. */
static void gen_stdat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif
3857 
/*
 * Common body of the store-conditional (st{b,h,w,d}cx.) handlers.
 * The store succeeds only if EA matches the reservation address and
 * memory still holds the reserved value; CR0 receives XER_SO plus EQ
 * on success.  The reservation is cleared in either case.
 */
static void gen_conditional_store(DisasContext *ctx, MemOp memop)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    int reg = rS(ctx->opcode);

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
    tcg_temp_free(t0);

    t0 = tcg_temp_new();
    /* cmpxchg against the reserved value; store happens iff it matches. */
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
                              cpu_gpr[reg], ctx->mem_idx,
                              DEF_MEMOP(memop) | MO_ALIGN);
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
    tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
    tcg_gen_or_tl(t0, t0, cpu_so);
    tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /*
     * Address mismatch implies failure.  But we still need to provide
     * the memory barrier semantics of the instruction.
     */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* Clear the reservation. */
    tcg_gen_movi_tl(cpu_reserve, -1);
}
3893 
/* Define a gen_* handler emitting a store-conditional of the given size. */
#define STCX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_conditional_store(ctx, memop);     \
}

STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))

#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, DEF_MEMOP(MO_UQ))
/* stdcx. */
STCX(stdcx_, DEF_MEMOP(MO_UQ))
3909 
/* lqarx */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    /* lqarx requires an even RD, and RD must differ from RA and RB. */
    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode.  */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_ATOMIC128) {
            TCGv_i32 oi = tcg_temp_new_i32();
            if (ctx->le_mode) {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
            } else {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
            }
            tcg_temp_free_i32(oi);
            /* The helper returns the second half through env->retxh. */
            tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
            tcg_temp_free(EA);
            return;
        }
    } else if (ctx->le_mode) {
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);

    /* Record both halves of the reserved quadword. */
    tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
}
3967 
/* stqcx. */
static void gen_stqcx_(DisasContext *ctx)
{
    int rs = rS(ctx->opcode);
    TCGv EA, hi, lo;

    /* stqcx. requires an even RS. */
    if (unlikely(rs & 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RS+1, even in LE mode.  */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        /* Compare both halves of memory against the reserved value. */
        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        /* Clear the reservation. */
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
4044 #endif /* defined(TARGET_PPC64) */
4045 
/* sync */
static void gen_sync(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_ALL;
    uint32_t l = (ctx->opcode >> 21) & 3;

    /* lwsync (L=1) orders only ld/ld, ld/st and st/st. */
    if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
        bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
    }

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
     */
    if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
        gen_check_tlb_flush(ctx, true);
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}
4070 
/* wait */
static void gen_wait(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(1);
    /*
     * Set cpu->halted.  cpu_env points at the CPUPPCState embedded in
     * PowerPCCPU, hence the negative offset back to the CPUState part.
     */
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
4081 
4082 #if defined(TARGET_PPC64)
/* doze: enter the DOZE power-saving mode (hypervisor-privileged). */
static void gen_doze(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv_i32 t;

    CHK_HV(ctx);
    t = tcg_const_i32(PPC_PM_DOZE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}
4098 
/* nap: enter the NAP power-saving mode (hypervisor-privileged). */
static void gen_nap(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv_i32 t;

    CHK_HV(ctx);
    t = tcg_const_i32(PPC_PM_NAP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}
4114 
/* stop: enter the STOP power-saving state (hypervisor-privileged). */
static void gen_stop(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv_i32 t;

    CHK_HV(ctx);
    t = tcg_const_i32(PPC_PM_STOP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}
4130 
/* sleep: enter the SLEEP power-saving mode (hypervisor-privileged). */
static void gen_sleep(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv_i32 t;

    CHK_HV(ctx);
    t = tcg_const_i32(PPC_PM_SLEEP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}
4146 
/* rvwinkle: enter the RVWINKLE power-saving mode (hypervisor-privileged). */
static void gen_rvwinkle(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv_i32 t;

    CHK_HV(ctx);
    t = tcg_const_i32(PPC_PM_RVWINKLE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}
4162 #endif /* #if defined(TARGET_PPC64) */
4163 
/* Record nip in the Come-From Address Register, when the CPU has one. */
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->has_cfar) {
        tcg_gen_movi_tl(cpu_cfar, nip);
    }
#endif
}
4172 
#if defined(TARGET_PPC64)
/* Add base.num_insns to the PMU instruction counters for this TB. */
static void pmu_count_insns(DisasContext *ctx)
{
    /*
     * Do not bother calling the helper if the PMU isn't counting
     * instructions.
     */
    if (!ctx->pmu_insn_cnt) {
        return;
    }

 #if !defined(CONFIG_USER_ONLY)
    /*
     * The PMU insns_inc() helper stops the internal PMU timer if a
     * counter overflows happens. In that case, if the guest is
     * running with icount and we do not handle it beforehand,
     * the helper can trigger a 'bad icount read'.
     */
    gen_icount_io_start(ctx);

    gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns))
#else
    /*
     * User mode can read (but not write) PMC5 and start/stop
     * the PMU via MMCR0_FC. In this case just increment
     * PMC5 with base.num_insns.
     */
    TCGv t0 = tcg_temp_new();

    gen_load_spr(t0, SPR_POWER_PMC5);
    tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
    gen_store_spr(SPR_POWER_PMC5, t0);

    tcg_temp_free(t0);
#endif /* #if !defined(CONFIG_USER_ONLY) */
}
#else
/* The PMU instruction counters are only modelled on TARGET_PPC64. */
static void pmu_count_insns(DisasContext *ctx)
{
    return;
}
#endif /* #if defined(TARGET_PPC64) */
4215 
/* True when a direct TB link (goto_tb) to dest is permitted. */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}
4220 
4221 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4222 {
4223     if (unlikely(ctx->singlestep_enabled)) {
4224         gen_debug_exception(ctx);
4225     } else {
4226         /*
4227          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4228          * CF_NO_GOTO_PTR is set. Count insns now.
4229          */
4230         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4231             pmu_count_insns(ctx);
4232         }
4233 
4234         tcg_gen_lookup_and_goto_ptr();
4235     }
4236 }
4237 
4238 /***                                Branch                                 ***/
/* Emit a (possibly chained) jump to dest, slot n of this TB's exits. */
static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if (use_goto_tb(ctx, dest)) {
        /* Direct exit: count insns here since no helper runs afterwards. */
        pmu_count_insns(ctx);
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        gen_lookup_and_goto_ptr(ctx);
    }
}
4254 
4255 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4256 {
4257     if (NARROW_MODE(ctx)) {
4258         nip = (uint32_t)nip;
4259     }
4260     tcg_gen_movi_tl(cpu_lr, nip);
4261 }
4262 
/* b ba bl bla */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        /* Relative form: target is CIA plus displacement. */
        target = ctx->cia + li;
    } else {
        /* Absolute form. */
        target = li;
    }
    if (LK(ctx->opcode)) {
        /* Linking form: save the return address in LR. */
        gen_setlr(ctx, ctx->base.pc_next);
    }
    gen_update_cfar(ctx, ctx->cia);
    gen_goto_tb(ctx, 0, target);
    ctx->base.is_jmp = DISAS_NORETURN;
}
4283 
/* Branch-target kinds for gen_bcond. */
#define BCOND_IM  0
#define BCOND_LR  1
#define BCOND_CTR 2
#define BCOND_TAR 3
4288 
/*
 * Common body of bc/bclr/bcctr/bctar: optionally decrement-and-test
 * CTR and/or test a CR bit according to the BO field, then branch to
 * an immediate displacement or to LR/CTR/TAR.  A fallthrough exit is
 * emitted when the branch is conditional.
 */
static void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    TCGLabel *l1;
    TCGv target;

    if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
        /* Local temp: its value must survive the branch emitted below. */
        target = tcg_temp_local_new();
        if (type == BCOND_CTR) {
            tcg_gen_mov_tl(target, cpu_ctr);
        } else if (type == BCOND_TAR) {
            gen_load_spr(target, SPR_TAR);
        } else {
            tcg_gen_mov_tl(target, cpu_lr);
        }
    } else {
        target = NULL;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();

        if (type == BCOND_CTR) {
            /*
             * All ISAs up to v3 describe this form of bcctr as invalid but
             * some processors, ie. 64-bit server processors compliant with
             * arch 2.x, do implement a "test and decrement" logic instead,
             * as described in their respective UMs. This logic involves CTR
             * to act as both the branch target and a counter, which makes
             * it basically useless and thus never used in real code.
             *
             * This form was hence chosen to trigger extra micro-architectural
             * side-effect on real HW needed for the Spectre v2 workaround.
             * It is up to guests that implement such workaround, ie. linux, to
             * use this form in a way it just triggers the side-effect without
             * doing anything else harmful.
             */
            if (unlikely(!is_book3s_arch2x(ctx))) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                tcg_temp_free(temp);
                tcg_temp_free(target);
                return;
            }

            /* Test CTR first, then decrement ("test and decrement"). */
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
        } else {
            /* Normal form: decrement CTR, then test. */
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 0x08 >> (bi & 0x03);
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    gen_update_cfar(ctx, ctx->cia);
    if (type == BCOND_IM) {
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->cia + li);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
    } else {
        /* Register branch: mask the two low bits of the target address. */
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        gen_lookup_and_goto_ptr(ctx);
        tcg_temp_free(target);
    }
    if ((bo & 0x14) != 0x14) {
        /* fallthrough case */
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->base.pc_next);
    }
    ctx->base.is_jmp = DISAS_NORETURN;
}
4402 
/* bc: conditional branch to an immediate displacement. */
static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}
4407 
/* bcctr: conditional branch to the count register. */
static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}
4412 
/* bclr: conditional branch to the link register. */
static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}
4417 
/* bctar: conditional branch to the target address register. */
static void gen_bctar(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_TAR);
}
4422 
4423 /***                      Condition register logical                       ***/
/*
 * Generate a CR-bit logical op: crbD = crbA <op> crbB.  Each source bit
 * is shifted to the crbD position within its 4-bit CR field, combined
 * with tcg_op, masked to the single destination bit, and merged back
 * into the destination CR field.  Note the 'opc' argument is not used
 * by the macro body itself.
 */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
4454 
/*
 * Instantiate the eight CR-field bit-logical instructions.  The third
 * macro argument (the extended opcode) is not referenced by the macro
 * body; it is retained for documentation only.
 */
/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4471 
/* mcrf */
static void gen_mcrf(DisasContext *ctx)
{
    /* Copy CR field crfS into CR field crfD (each CR field is a TCG global) */
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
4477 
4478 /***                           System linkage                              ***/
4479 
/* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /*
     * This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x
     */
    if (is_book3s_arch2x(ctx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* Restore CPU state */
    CHK_SV(ctx);
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfi(cpu_env);
    /* Machine state changed by the helper: force exit from the TB */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4502 
4503 #if defined(TARGET_PPC64)
/* rfid (supervisor only, 64-bit) */
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Restore CPU state */
    CHK_SV(ctx);
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfid(cpu_env);
    /* Machine state changed by the helper: force exit from the TB */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4517 
4518 #if !defined(CONFIG_USER_ONLY)
4519 static void gen_rfscv(DisasContext *ctx)
4520 {
4521 #if defined(CONFIG_USER_ONLY)
4522     GEN_PRIV(ctx);
4523 #else
4524     /* Restore CPU state */
4525     CHK_SV(ctx);
4526     gen_icount_io_start(ctx);
4527     gen_update_cfar(ctx, ctx->cia);
4528     gen_helper_rfscv(cpu_env);
4529     ctx->base.is_jmp = DISAS_EXIT;
4530 #endif
4531 }
4532 #endif
4533 
/* hrfid (hypervisor only) */
static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Restore CPU state */
    CHK_HV(ctx);
    gen_helper_hrfid(cpu_env);
    /* Machine state changed by the helper: force exit from the TB */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4545 #endif
4546 
/* sc */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
#endif
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    /* Extract the 7-bit LEV field and pass it as the exception error code */
    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}
4561 
#if defined(TARGET_PPC64)
#if !defined(CONFIG_USER_ONLY)
/* scv: system call vectored; the 7-bit LEV field selects the vector */
static void gen_scv(DisasContext *ctx)
{
    uint32_t lev = (ctx->opcode >> 5) & 0x7F;

    /* Set the PC back to the faulting instruction. */
    gen_update_nip(ctx, ctx->cia);
    gen_helper_scv(cpu_env, tcg_constant_i32(lev));

    /* Control never returns to this TB */
    ctx->base.is_jmp = DISAS_NORETURN;
}
#endif
#endif
4576 
4577 /***                                Trap                                   ***/
4578 
4579 /* Check for unconditional traps (always or never) */
4580 static bool check_unconditional_trap(DisasContext *ctx)
4581 {
4582     /* Trap never */
4583     if (TO(ctx->opcode) == 0) {
4584         return true;
4585     }
4586     /* Trap always */
4587     if (TO(ctx->opcode) == 31) {
4588         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4589         return true;
4590     }
4591     return false;
4592 }
4593 
4594 /* tw */
4595 static void gen_tw(DisasContext *ctx)
4596 {
4597     TCGv_i32 t0;
4598 
4599     if (check_unconditional_trap(ctx)) {
4600         return;
4601     }
4602     t0 = tcg_const_i32(TO(ctx->opcode));
4603     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4604                   t0);
4605     tcg_temp_free_i32(t0);
4606 }
4607 
4608 /* twi */
4609 static void gen_twi(DisasContext *ctx)
4610 {
4611     TCGv t0;
4612     TCGv_i32 t1;
4613 
4614     if (check_unconditional_trap(ctx)) {
4615         return;
4616     }
4617     t0 = tcg_const_tl(SIMM(ctx->opcode));
4618     t1 = tcg_const_i32(TO(ctx->opcode));
4619     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4620     tcg_temp_free(t0);
4621     tcg_temp_free_i32(t1);
4622 }
4623 
4624 #if defined(TARGET_PPC64)
4625 /* td */
4626 static void gen_td(DisasContext *ctx)
4627 {
4628     TCGv_i32 t0;
4629 
4630     if (check_unconditional_trap(ctx)) {
4631         return;
4632     }
4633     t0 = tcg_const_i32(TO(ctx->opcode));
4634     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4635                   t0);
4636     tcg_temp_free_i32(t0);
4637 }
4638 
4639 /* tdi */
4640 static void gen_tdi(DisasContext *ctx)
4641 {
4642     TCGv t0;
4643     TCGv_i32 t1;
4644 
4645     if (check_unconditional_trap(ctx)) {
4646         return;
4647     }
4648     t0 = tcg_const_tl(SIMM(ctx->opcode));
4649     t1 = tcg_const_i32(TO(ctx->opcode));
4650     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4651     tcg_temp_free(t0);
4652     tcg_temp_free_i32(t1);
4653 }
4654 #endif
4655 
4656 /***                          Processor control                            ***/
4657 
/* mcrxr */
static void gen_mcrxr(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* Build SO:OV:CA:0 (bits 3,2,1,0) into the destination CR field */
    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);
    tcg_gen_shli_i32(t1, t1, 2);
    tcg_gen_shli_i32(dst, dst, 1);
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    /* mcrxr clears the copied XER bits */
    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}
4680 
4681 #ifdef TARGET_PPC64
/* mcrxrx */
static void gen_mcrxrx(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    /* Destination CR field = OV:OV32:CA:CA32 (bits 3..0) */
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
4701 #endif
4702 
4703 /* mfcr mfocrf */
4704 static void gen_mfcr(DisasContext *ctx)
4705 {
4706     uint32_t crm, crn;
4707 
4708     if (likely(ctx->opcode & 0x00100000)) {
4709         crm = CRM(ctx->opcode);
4710         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4711             crn = ctz32(crm);
4712             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4713             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4714                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4715         }
4716     } else {
4717         TCGv_i32 t0 = tcg_temp_new_i32();
4718         tcg_gen_mov_i32(t0, cpu_crf[0]);
4719         tcg_gen_shli_i32(t0, t0, 4);
4720         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4721         tcg_gen_shli_i32(t0, t0, 4);
4722         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4723         tcg_gen_shli_i32(t0, t0, 4);
4724         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4725         tcg_gen_shli_i32(t0, t0, 4);
4726         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4727         tcg_gen_shli_i32(t0, t0, 4);
4728         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4729         tcg_gen_shli_i32(t0, t0, 4);
4730         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4731         tcg_gen_shli_i32(t0, t0, 4);
4732         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4733         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4734         tcg_temp_free_i32(t0);
4735     }
4736 }
4737 
/* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    /* Supervisor only: copy the current MSR into rD */
    CHK_SV(ctx);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}
4744 
/* mfspr */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Pick the SPR read callback matching the current privilege level */
#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /*
             * This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
                              "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                              ctx->cia);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }
        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to read invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);

        /*
         * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
         * generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        }
    }
}
4806 
/* mfspr: thin wrapper over the shared mfspr/mftb implementation */
static void gen_mfspr(DisasContext *ctx)
{
    gen_op_mfspr(ctx);
}
4811 
/* mftb */
static void gen_mftb(DisasContext *ctx)
{
    /* The time base is accessed through the SPR callback tables */
    gen_op_mfspr(ctx);
}
4817 
4818 /* mtcrf mtocrf*/
4819 static void gen_mtcrf(DisasContext *ctx)
4820 {
4821     uint32_t crm, crn;
4822 
4823     crm = CRM(ctx->opcode);
4824     if (likely((ctx->opcode & 0x00100000))) {
4825         if (crm && ((crm & (crm - 1)) == 0)) {
4826             TCGv_i32 temp = tcg_temp_new_i32();
4827             crn = ctz32(crm);
4828             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4829             tcg_gen_shri_i32(temp, temp, crn * 4);
4830             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4831             tcg_temp_free_i32(temp);
4832         }
4833     } else {
4834         TCGv_i32 temp = tcg_temp_new_i32();
4835         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4836         for (crn = 0 ; crn < 8 ; crn++) {
4837             if (crm & (1 << crn)) {
4838                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4839                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4840             }
4841         }
4842         tcg_temp_free_i32(temp);
4843     }
4844 }
4845 
4846 /* mtmsr */
4847 #if defined(TARGET_PPC64)
/* mtmsrd (64-bit move to MSR, supervisor only) */
static void gen_mtmsrd(DisasContext *ctx)
{
    if (unlikely(!is_book3s_arch2x(ctx))) {
        gen_invalid(ctx);
        return;
    }

    CHK_SV(ctx);

#if !defined(CONFIG_USER_ONLY)
    TCGv t0, t1;
    target_ulong mask;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_icount_io_start(ctx);

    if (ctx->opcode & 0x00010000) {
        /* L=1 form only updates EE and RI */
        mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
    } else {
        /* mtmsrd does not alter HV, S, ME, or LE */
        mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
                 (1ULL << MSR_HV));
        /*
         * XXX: we need to update nip before the store if we enter
         *      power saving mode, we will exit the loop directly from
         *      ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
    }

    /* new MSR = (rS & mask) | (old MSR & ~mask) */
    tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_andi_tl(t1, cpu_msr, ~mask);
    tcg_gen_or_tl(t0, t0, t1);

    gen_helper_store_msr(cpu_env, t0);

    /* Must stop the translation as machine state (may have) changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;

    tcg_temp_free(t0);
    tcg_temp_free(t1);
#endif /* !defined(CONFIG_USER_ONLY) */
}
4894 #endif /* defined(TARGET_PPC64) */
4895 
/* mtmsr (32-bit move to MSR, supervisor only) */
static void gen_mtmsr(DisasContext *ctx)
{
    CHK_SV(ctx);

#if !defined(CONFIG_USER_ONLY)
    TCGv t0, t1;
    /* Only the low 32 MSR bits can be changed by the 32-bit form */
    target_ulong mask = 0xFFFFFFFF;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_icount_io_start(ctx);
    if (ctx->opcode & 0x00010000) {
        /* L=1 form only updates EE and RI */
        mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
    } else {
        /* mtmsr does not alter S, ME, or LE */
        mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));

        /*
         * XXX: we need to update nip before the store if we enter
         *      power saving mode, we will exit the loop directly from
         *      ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
    }

    /* new MSR = (rS & mask) | (old MSR & ~mask) */
    tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_andi_tl(t1, cpu_msr, ~mask);
    tcg_gen_or_tl(t0, t0, t1);

    gen_helper_store_msr(cpu_env, t0);

    /* Must stop the translation as machine state (may have) changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;

    tcg_temp_free(t0);
    tcg_temp_free(t1);
#endif
}
4936 
/* mtspr */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

    /* Pick the SPR write callback matching the current privilege level */
#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
                          "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                          ctx->cia);
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to write invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);


        /*
         * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
         * generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        }
    }
}
4993 
4994 #if defined(TARGET_PPC64)
/* setb */
static void gen_setb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 = tcg_constant_i32(8);
    TCGv_i32 tm1 = tcg_constant_i32(-1);
    int crf = crfS(ctx->opcode);

    /* t0 = 1 when the 4-bit CR field value is >= 4, else 0 */
    tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
    /* ... overridden with -1 when the value is >= 8 */
    tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
    /* Sign-extend the -1/0/1 result into rD */
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_temp_free_i32(t0);
}
5009 #endif
5010 
5011 /***                         Cache management                              ***/
5012 
/* dcbf */
static void gen_dcbf(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Dummy byte load at EA so MMU faults are raised; value is discarded */
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}
5024 
/* dcbfep (external PID dcbf) */
static void gen_dcbfep(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    CHK_SV(ctx);
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Dummy byte load through the external-PID MMU index */
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}
5037 
/* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv EA, val;

    CHK_SV(ctx);
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    /* Dummy byte read-back: load then store the same byte at EA */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}
5058 
/* dcbst */
static void gen_dcbst(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Dummy byte load at EA; value is discarded */
    gen_qemu_ld8u(ctx, t0, t0);
    tcg_temp_free(t0);
}
5070 
/* dcbstep (dcbstep External PID version) */
static void gen_dcbstep(DisasContext *ctx)
{
    /* XXX: specification says this is treated as a load by the MMU */
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Dummy byte load through the external-PID MMU index */
    tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
    tcg_temp_free(t0);
}
5082 
/* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5092 
/* dcbtep */
static void gen_dcbtep(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5102 
/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5112 
/* dcbtstep */
static void gen_dcbtstep(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5122 
/* dcbtls */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache: set the unable-to-lock bit in L1CSR0 */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}
5133 
5134 /* dcbz */
5135 static void gen_dcbz(DisasContext *ctx)
5136 {
5137     TCGv tcgv_addr;
5138     TCGv_i32 tcgv_op;
5139 
5140     gen_set_access_type(ctx, ACCESS_CACHE);
5141     tcgv_addr = tcg_temp_new();
5142     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5143     gen_addr_reg_index(ctx, tcgv_addr);
5144     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5145     tcg_temp_free(tcgv_addr);
5146     tcg_temp_free_i32(tcgv_op);
5147 }
5148 
5149 /* dcbzep */
5150 static void gen_dcbzep(DisasContext *ctx)
5151 {
5152     TCGv tcgv_addr;
5153     TCGv_i32 tcgv_op;
5154 
5155     gen_set_access_type(ctx, ACCESS_CACHE);
5156     tcgv_addr = tcg_temp_new();
5157     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5158     gen_addr_reg_index(ctx, tcgv_addr);
5159     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5160     tcg_temp_free(tcgv_addr);
5161     tcg_temp_free_i32(tcgv_op);
5162 }
5163 
5164 /* dst / dstt */
5165 static void gen_dst(DisasContext *ctx)
5166 {
5167     if (rA(ctx->opcode) == 0) {
5168         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5169     } else {
5170         /* interpreted as no-op */
5171     }
5172 }
5173 
5174 /* dstst /dststt */
5175 static void gen_dstst(DisasContext *ctx)
5176 {
5177     if (rA(ctx->opcode) == 0) {
5178         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5179     } else {
5180         /* interpreted as no-op */
5181     }
5182 
5183 }
5184 
/* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}
5190 
5191 /* icbi */
5192 static void gen_icbi(DisasContext *ctx)
5193 {
5194     TCGv t0;
5195     gen_set_access_type(ctx, ACCESS_CACHE);
5196     t0 = tcg_temp_new();
5197     gen_addr_reg_index(ctx, t0);
5198     gen_helper_icbi(cpu_env, t0);
5199     tcg_temp_free(t0);
5200 }
5201 
5202 /* icbiep */
5203 static void gen_icbiep(DisasContext *ctx)
5204 {
5205     TCGv t0;
5206     gen_set_access_type(ctx, ACCESS_CACHE);
5207     t0 = tcg_temp_new();
5208     gen_addr_reg_index(ctx, t0);
5209     gen_helper_icbiep(cpu_env, t0);
5210     tcg_temp_free(t0);
5211 }
5212 
/* Optional: */
/* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a store by the MMU
     *      but does not generate any exception
     */
}
5223 
5224 /***                    Segment register manipulation                      ***/
5225 /* Supervisor only: */
5226 
5227 /* mfsr */
5228 static void gen_mfsr(DisasContext *ctx)
5229 {
5230 #if defined(CONFIG_USER_ONLY)
5231     GEN_PRIV(ctx);
5232 #else
5233     TCGv t0;
5234 
5235     CHK_SV(ctx);
5236     t0 = tcg_const_tl(SR(ctx->opcode));
5237     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5238     tcg_temp_free(t0);
5239 #endif /* defined(CONFIG_USER_ONLY) */
5240 }
5241 
5242 /* mfsrin */
5243 static void gen_mfsrin(DisasContext *ctx)
5244 {
5245 #if defined(CONFIG_USER_ONLY)
5246     GEN_PRIV(ctx);
5247 #else
5248     TCGv t0;
5249 
5250     CHK_SV(ctx);
5251     t0 = tcg_temp_new();
5252     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5253     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5254     tcg_temp_free(t0);
5255 #endif /* defined(CONFIG_USER_ONLY) */
5256 }
5257 
5258 /* mtsr */
5259 static void gen_mtsr(DisasContext *ctx)
5260 {
5261 #if defined(CONFIG_USER_ONLY)
5262     GEN_PRIV(ctx);
5263 #else
5264     TCGv t0;
5265 
5266     CHK_SV(ctx);
5267     t0 = tcg_const_tl(SR(ctx->opcode));
5268     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5269     tcg_temp_free(t0);
5270 #endif /* defined(CONFIG_USER_ONLY) */
5271 }
5272 
/* mtsrin */
static void gen_mtsrin(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;
    CHK_SV(ctx);

    t0 = tcg_temp_new();
    /* The SR number is the 4-bit field at bit 28 of rB */
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    /*
     * NOTE(review): the stored value comes from rD() here while the
     * companion gen_mtsrin_64b uses rS(); presumably both macros decode
     * the same opcode bits so the result is identical -- confirm.
     */
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
5288 
5289 #if defined(TARGET_PPC64)
5290 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5291 
5292 /* mfsr */
5293 static void gen_mfsr_64b(DisasContext *ctx)
5294 {
5295 #if defined(CONFIG_USER_ONLY)
5296     GEN_PRIV(ctx);
5297 #else
5298     TCGv t0;
5299 
5300     CHK_SV(ctx);
5301     t0 = tcg_const_tl(SR(ctx->opcode));
5302     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5303     tcg_temp_free(t0);
5304 #endif /* defined(CONFIG_USER_ONLY) */
5305 }
5306 
/* mfsrin */
static void gen_mfsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    /* The SR number is the 4-bit field at bit 28 of rB */
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
5322 
5323 /* mtsr */
5324 static void gen_mtsr_64b(DisasContext *ctx)
5325 {
5326 #if defined(CONFIG_USER_ONLY)
5327     GEN_PRIV(ctx);
5328 #else
5329     TCGv t0;
5330 
5331     CHK_SV(ctx);
5332     t0 = tcg_const_tl(SR(ctx->opcode));
5333     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5334     tcg_temp_free(t0);
5335 #endif /* defined(CONFIG_USER_ONLY) */
5336 }
5337 
/* mtsrin */
static void gen_mtsrin_64b(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    /* The SR number is the 4-bit field at bit 28 of rB */
    tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
    gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
5353 
5354 #endif /* defined(TARGET_PPC64) */
5355 
5356 /***                      Lookaside buffer management                      ***/
5357 /* Optional & supervisor only: */
5358 
/* tlbia */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Hypervisor privileged: invalidate all TLB entries */
    CHK_HV(ctx);

    gen_helper_tlbia(cpu_env);
#endif  /* defined(CONFIG_USER_ONLY) */
}
5370 
/* tlbsync */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else

    if (ctx->gtse) {
        CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
    } else {
        CHK_HV(ctx); /* Else hypervisor privileged */
    }

    /* BookS does both ptesync and tlbsync, making tlbsync a nop for server */
    if (ctx->insns_flags & PPC_BOOKE) {
        gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
5390 
5391 /***                              External control                         ***/
5392 /* Optional: */
5393 
5394 /* eciwx */
5395 static void gen_eciwx(DisasContext *ctx)
5396 {
5397     TCGv t0;
5398     /* Should check EAR[E] ! */
5399     gen_set_access_type(ctx, ACCESS_EXT);
5400     t0 = tcg_temp_new();
5401     gen_addr_reg_index(ctx, t0);
5402     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5403                        DEF_MEMOP(MO_UL | MO_ALIGN));
5404     tcg_temp_free(t0);
5405 }
5406 
5407 /* ecowx */
5408 static void gen_ecowx(DisasContext *ctx)
5409 {
5410     TCGv t0;
5411     /* Should check EAR[E] ! */
5412     gen_set_access_type(ctx, ACCESS_EXT);
5413     t0 = tcg_temp_new();
5414     gen_addr_reg_index(ctx, t0);
5415     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5416                        DEF_MEMOP(MO_UL | MO_ALIGN));
5417     tcg_temp_free(t0);
5418 }
5419 
5420 /* 602 - 603 - G2 TLB management */
5421 
/* tlbld */
static void gen_tlbld_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Supervisor only: load a data TLB entry for the address in rB */
    CHK_SV(ctx);
    gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
5432 
/* tlbli */
static void gen_tlbli_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Supervisor only: load an instruction TLB entry for the address in rB */
    CHK_SV(ctx);
    gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
5443 
5444 /* BookE specific instructions */
5445 
/* XXX: not implemented on 440 ? */
static void gen_mfapidi(DisasContext *ctx)
{
    /* XXX: TODO */
    /* Unimplemented: always raise an invalid-instruction exception */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
5452 
/* XXX: not implemented on 440 ? */
static void gen_tlbiva(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /*
     * NOTE(review): the effective address is computed into t0 but the
     * helper is passed cpu_gpr[rB] directly, leaving t0 unused; the two
     * match only when rA == 0.  Confirm whether the helper should take
     * t0 instead.
     */
    gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
5468 
5469 /* All 405 MAC instructions are translated here */
5470 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5471                                         int ra, int rb, int rt, int Rc)
5472 {
5473     TCGv t0, t1;
5474 
5475     t0 = tcg_temp_local_new();
5476     t1 = tcg_temp_local_new();
5477 
5478     switch (opc3 & 0x0D) {
5479     case 0x05:
5480         /* macchw    - macchw.    - macchwo   - macchwo.   */
5481         /* macchws   - macchws.   - macchwso  - macchwso.  */
5482         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5483         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5484         /* mulchw - mulchw. */
5485         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5486         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5487         tcg_gen_ext16s_tl(t1, t1);
5488         break;
5489     case 0x04:
5490         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5491         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5492         /* mulchwu - mulchwu. */
5493         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5494         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5495         tcg_gen_ext16u_tl(t1, t1);
5496         break;
5497     case 0x01:
5498         /* machhw    - machhw.    - machhwo   - machhwo.   */
5499         /* machhws   - machhws.   - machhwso  - machhwso.  */
5500         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5501         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5502         /* mulhhw - mulhhw. */
5503         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5504         tcg_gen_ext16s_tl(t0, t0);
5505         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5506         tcg_gen_ext16s_tl(t1, t1);
5507         break;
5508     case 0x00:
5509         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5510         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5511         /* mulhhwu - mulhhwu. */
5512         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5513         tcg_gen_ext16u_tl(t0, t0);
5514         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5515         tcg_gen_ext16u_tl(t1, t1);
5516         break;
5517     case 0x0D:
5518         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5519         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5520         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5521         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5522         /* mullhw - mullhw. */
5523         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5524         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5525         break;
5526     case 0x0C:
5527         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5528         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5529         /* mullhwu - mullhwu. */
5530         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5531         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5532         break;
5533     }
5534     if (opc2 & 0x04) {
5535         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5536         tcg_gen_mul_tl(t1, t0, t1);
5537         if (opc2 & 0x02) {
5538             /* nmultiply-and-accumulate (0x0E) */
5539             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5540         } else {
5541             /* multiply-and-accumulate (0x0C) */
5542             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5543         }
5544 
5545         if (opc3 & 0x12) {
5546             /* Check overflow and/or saturate */
5547             TCGLabel *l1 = gen_new_label();
5548 
5549             if (opc3 & 0x10) {
5550                 /* Start with XER OV disabled, the most likely case */
5551                 tcg_gen_movi_tl(cpu_ov, 0);
5552             }
5553             if (opc3 & 0x01) {
5554                 /* Signed */
5555                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5556                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5557                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5558                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5559                 if (opc3 & 0x02) {
5560                     /* Saturate */
5561                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5562                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5563                 }
5564             } else {
5565                 /* Unsigned */
5566                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5567                 if (opc3 & 0x02) {
5568                     /* Saturate */
5569                     tcg_gen_movi_tl(t0, UINT32_MAX);
5570                 }
5571             }
5572             if (opc3 & 0x10) {
5573                 /* Check overflow */
5574                 tcg_gen_movi_tl(cpu_ov, 1);
5575                 tcg_gen_movi_tl(cpu_so, 1);
5576             }
5577             gen_set_label(l1);
5578             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5579         }
5580     } else {
5581         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5582     }
5583     tcg_temp_free(t0);
5584     tcg_temp_free(t1);
5585     if (unlikely(Rc) != 0) {
5586         /* Update Rc0 */
5587         gen_set_Rc0(ctx, cpu_gpr[rt]);
5588     }
5589 }
5590 
/*
 * Emit a gen_* entry point for one MAC-family opcode by delegating to
 * gen_405_mulladd_insn with the opcode's register fields and Rc bit.
 */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}
5597 
/*
 * One handler per 405 MAC / multiply opcode.  The opc3 argument encodes
 * operand selection and signedness (interpreted by gen_405_mulladd_insn);
 * the 0x10 bit marks the overflow-enabled ("o") variants.
 */
/* macchw    - macchw.    */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo   - macchwo.   */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws   - macchws.   */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso  - macchwso.  */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu  - macchwsu.  */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu   - macchwu.   */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo  - macchwuo.  */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw    - machhw.    */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo   - machhwo.   */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws   - machhws.   */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso  - machhwso.  */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu  - machhwsu.  */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu   - machhwu.   */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo  - machhwuo.  */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw    - maclhw.    */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo   - maclhwo.   */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws   - maclhws.   */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso  - maclhwso.  */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu   - maclhwu.   */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo  - maclhwuo.  */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu  - maclhwsu.  */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw   - nmacchw.   */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo  - nmacchwo.  */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws  - nmacchws.  */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw   - nmachhw.   */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo  - nmachhwo.  */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws  - nmachhws.  */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw   - nmaclhw.   */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo  - nmaclhwo.  */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws  - nmaclhws.  */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw  - mulchw.  */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw  - mulhhw.  */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw  - mullhw.  */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5683 
/* mfdcr - supervisor only; DCR number taken from the opcode's SPR field */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv dcrn;

    CHK_SV(ctx);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    /* rD = value read from the DCR by the helper. */
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr - supervisor only; DCR number taken from the opcode's SPR field */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv dcrn;

    CHK_SV(ctx);
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    /* The helper writes rS into the DCR. */
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx - indexed form: DCR number comes from GPR[rA] at runtime */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx - indexed form: DCR number comes from GPR[rA] at runtime */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}
5741 
/* dccci - data cache congruence-class invalidate; privileged no-op here */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV(ctx);
    /* interpreted as no-op */
}

/* dcread - supervisor only */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv EA, val;

    CHK_SV(ctx);
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /*
     * The loaded value is discarded (presumably the load is kept for its
     * exception side effects); rD receives the effective address instead.
     */
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}
5768 
/* icbt (40x variant) */
static void gen_icbt_40x(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* iccci - instruction cache invalidate; privileged no-op here */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV(ctx);
    /* interpreted as no-op */
}

/* icread - instruction cache read; privileged no-op here */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV(ctx);
    /* interpreted as no-op */
}
5792 
/* rfci (supervisor only) - 40x return from critical interrupt */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    /* MSR/PC changed: end the TB and return to the main loop. */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfci - return from critical interrupt (supervisor only) */
static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    /* MSR/PC changed: end the TB and return to the main loop. */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* rfdi - return from debug interrupt */
/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    /* MSR/PC changed: end the TB and return to the main loop. */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfmci - return from machine-check interrupt */
/* XXX: not implemented on 440 ? */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    /* MSR/PC changed: end the TB and return to the main loop. */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
5845 
/* TLB management - PowerPC 405 implementation */

/* tlbre - read one word of a TLB entry (supervisor only) */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* rB selects which entry word is read: 0 = hi, 1 = lo, else invalid. */
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /*
         * tlbsx.: seed CR0 from XER[SO], then set CR0[EQ] unless the
         * helper returned -1 in rD (presumably its not-found marker).
         */
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe - write one word of a TLB entry (supervisor only) */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);

    /* rB selects which entry word is written: 0 = hi, 1 = lo. */
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
5917 
/* TLB management - PowerPC 440 implementation */

/* tlbre - read a TLB entry word (supervisor only); rB in 0..2 selects it */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);

    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            /* The word selector is passed to the helper as an immediate. */
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx. */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /*
         * tlbsx.: seed CR0 from XER[SO], then set CR0[EQ] unless the
         * helper returned -1 in rD (presumably its not-found marker).
         */
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe - write a TLB entry word (supervisor only); rB in 0..2 selects it */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            /* The word selector is passed to the helper as an immediate. */
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
5993 
5994 /* TLB management - PowerPC BookE 2.06 implementation */
5995 
5996 /* tlbre */
5997 static void gen_tlbre_booke206(DisasContext *ctx)
5998 {
5999  #if defined(CONFIG_USER_ONLY)
6000     GEN_PRIV(ctx);
6001 #else
6002    CHK_SV(ctx);
6003     gen_helper_booke206_tlbre(cpu_env);
6004 #endif /* defined(CONFIG_USER_ONLY) */
6005 }
6006 
/* tlbsx - tlbsx. */
static void gen_tlbsx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    if (rA(ctx->opcode)) {
        t0 = tcg_temp_new();
        /*
         * NOTE(review): the base value is read from the rD field even
         * though the guard tests rA != 0 -- looks suspicious; verify
         * against the BookE 2.06 tlbsx definition (EA = (rA|0) + rB).
         */
        tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
    } else {
        t0 = tcg_const_tl(0);
    }

    tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
    gen_helper_booke206_tlbsx(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe - supervisor only; operands are implicit (MAS registers) */
static void gen_tlbwe_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_booke206_tlbwe(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbivax - invalidate by EA = (rA|0) + rB (supervisor only) */
static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbilx - local invalidate; opcode bits 21-22 select the T field */
static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    /* T = 0, 1 and 3 dispatch to dedicated helpers; T = 2 is invalid. */
    switch ((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }

    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
6084 
6085 
/* wrtee - copy bit MSR_EE of rD into MSR[EE] (supervisor only) */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    /* Isolate the EE bit of rD, clear MSR[EE], then merge it in. */
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /*
     * Stop translation to have a chance to raise an exception if we
     * just set msr_ee to 1
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* wrteei - set or clear MSR[EE] from an opcode immediate bit */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        ctx->base.is_jmp = DISAS_EXIT_UPDATE;
    } else {
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
6124 
6125 /* PowerPC 440 specific instructions */
6126 
6127 /* dlmzb */
6128 static void gen_dlmzb(DisasContext *ctx)
6129 {
6130     TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6131     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6132                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6133     tcg_temp_free_i32(t0);
6134 }
6135 
/* mbar replaces eieio on 440 */
static void gen_mbar(DisasContext *ctx)
{
    /* interpreted as no-op */
}
6141 
6142 /* msync replaces sync on 440 */
6143 static void gen_msync_4xx(DisasContext *ctx)
6144 {
6145     /* Only e500 seems to treat reserved bits as invalid */
6146     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
6147         (ctx->opcode & 0x03FFF801)) {
6148         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6149     }
6150     /* otherwise interpreted as no-op */
6151 }
6152 
/* icbt (440 variant) */
static void gen_icbt_440(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
6162 
/* Embedded.Processor Control */

/* msgclr - hypervisor only; dispatches on Book3S arch 2.x vs embedded */
static void gen_msgclr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_HV(ctx);
    if (is_book3s_arch2x(ctx)) {
        gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* msgsnd - hypervisor only; dispatches on Book3S arch 2.x vs embedded */
static void gen_msgsnd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_HV(ctx);
    if (is_book3s_arch2x(ctx)) {
        /* Note: the Book3S helper takes only the rB message value. */
        gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* msgclrp - supervisor only (64-bit Book3S) */
static void gen_msgclrp(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* msgsndp - supervisor only (64-bit Book3S) */
static void gen_msgsndp(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
#endif

/* msgsync - hypervisor-checked, otherwise a no-op */
static void gen_msgsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_HV(ctx);
#endif /* defined(CONFIG_USER_ONLY) */
    /* interpreted as no-op */
}
6224 
6225 #if defined(TARGET_PPC64)
6226 static void gen_maddld(DisasContext *ctx)
6227 {
6228     TCGv_i64 t1 = tcg_temp_new_i64();
6229 
6230     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6231     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6232     tcg_temp_free_i64(t1);
6233 }
6234 
/* maddhd maddhdu - rD = high 64 bits of (rA * rB) + rC */
static void gen_maddhd_maddhdu(DisasContext *ctx)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* The Rc field set selects the unsigned form (maddhdu). */
    if (Rc(ctx->opcode)) {
        tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        /* Unsigned addend: high part of the extended rC is zero. */
        tcg_gen_movi_i64(t1, 0);
    } else {
        tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        /* Sign-extend rC into the high part via its sign bit. */
        tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
    }
    /* 128-bit add of the product and extended rC; rD gets the high half. */
    tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
                     cpu_gpr[rC(ctx->opcode)], t1);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(t1);
}
6257 #endif /* defined(TARGET_PPC64) */
6258 
/* tbegin - raise a facility-unavailable exception if TM is disabled */
static void gen_tbegin(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    gen_helper_tbegin(cpu_env);
}
6267 
/*
 * TM user-space instructions that reduce to "set CR0 = 0" because
 * tbegin always fails in QEMU (see the comment in the expansion).
 */
#define GEN_TM_NOOP(name)                                      \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails in QEMU, these user         \
     * space instructions all have a simple implementation:    \
     *                                                         \
     *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
     *           = 0b0 || 0b00    || 0b0                       \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

GEN_TM_NOOP(tend);
GEN_TM_NOOP(tabort);
GEN_TM_NOOP(tabortwc);
GEN_TM_NOOP(tabortwci);
GEN_TM_NOOP(tabortdc);
GEN_TM_NOOP(tabortdci);
GEN_TM_NOOP(tsr);
6292 
/* cp_abort - no-op (no copy-paste buffer state is modelled) */
static inline void gen_cp_abort(DisasContext *ctx)
{
    /* Do Nothing */
}
6297 
/* copy/paste are not implemented; both raise an invalid-insn exception. */
#define GEN_CP_PASTE_NOOP(name)                           \
static inline void gen_##name(DisasContext *ctx)          \
{                                                         \
    /*                                                    \
     * Generate invalid exception until we have an        \
     * implementation of the copy paste facility          \
     */                                                   \
    gen_invalid(ctx);                                     \
}

GEN_CP_PASTE_NOOP(copy)
GEN_CP_PASTE_NOOP(paste)
6310 
/* tcheck - always reports "doomed" since tbegin always fails */
static void gen_tcheck(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    /*
     * Because tbegin always fails, the tcheck implementation is
     * simple:
     *
     * CR[CRF] = TDOOMED || MSR[TS] || 0b0
     *         = 0b1 || 0b00 || 0b0
     */
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
}
6326 
#if defined(CONFIG_USER_ONLY)
/* User-only: privileged TM instructions always trap. */
#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    gen_priv_opc(ctx);                                         \
}

#else

/* System mode: privileged TM instructions reduce to "set CR0 = 0". */
#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    CHK_SV(ctx);                                               \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails, the implementation is      \
     * simple:                                                 \
     *                                                         \
     *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
     *         = 0b0 || 0b00 | 0b0                             \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

#endif

GEN_TM_PRIV_NOOP(treclaim);
GEN_TM_PRIV_NOOP(trechkpt);
6358 
/* Load floating-point register 'regno' from the CPU state into 'dst'. */
static inline void get_fpr(TCGv_i64 dst, int regno)
{
    tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
}

/* Store 'src' into floating-point register 'regno' in the CPU state. */
static inline void set_fpr(int regno, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
}

/* Load one 64-bit half of vector register 'regno' (high half if 'high'). */
static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
}

/* Store one 64-bit half of vector register 'regno' (high half if 'high'). */
static inline void set_avr64(int regno, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
}
6378 
6379 /*
6380  * Helpers for decodetree used by !function for decoding arguments.
6381  */
6382 static int times_2(DisasContext *ctx, int x)
6383 {
6384     return x * 2;
6385 }
6386 
6387 static int times_4(DisasContext *ctx, int x)
6388 {
6389     return x * 4;
6390 }
6391 
6392 static int times_16(DisasContext *ctx, int x)
6393 {
6394     return x * 16;
6395 }
6396 
6397 /*
6398  * Helpers for trans_* functions to check for specific insns flags.
6399  * Use token pasting to ensure that we use the proper flag with the
6400  * proper variable.
6401  */
6402 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
6403     do {                                                \
6404         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
6405             return false;                               \
6406         }                                               \
6407     } while (0)
6408 
6409 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
6410     do {                                                \
6411         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
6412             return false;                               \
6413         }                                               \
6414     } while (0)
6415 
6416 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
6417 #if TARGET_LONG_BITS == 32
6418 # define REQUIRE_64BIT(CTX)  return false
6419 #else
6420 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
6421 #endif
6422 
6423 #define REQUIRE_VECTOR(CTX)                             \
6424     do {                                                \
6425         if (unlikely(!(CTX)->altivec_enabled)) {        \
6426             gen_exception((CTX), POWERPC_EXCP_VPU);     \
6427             return true;                                \
6428         }                                               \
6429     } while (0)
6430 
6431 #define REQUIRE_VSX(CTX)                                \
6432     do {                                                \
6433         if (unlikely(!(CTX)->vsx_enabled)) {            \
6434             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
6435             return true;                                \
6436         }                                               \
6437     } while (0)
6438 
6439 #define REQUIRE_FPU(ctx)                                \
6440     do {                                                \
6441         if (unlikely(!(ctx)->fpu_enabled)) {            \
6442             gen_exception((ctx), POWERPC_EXCP_FPU);     \
6443             return true;                                \
6444         }                                               \
6445     } while (0)
6446 
#if !defined(CONFIG_USER_ONLY)
/*
 * Supervisor-privileged insn: in problem state (ctx->pr set), emit a
 * privileged-op program interrupt and finish the insn.
 */
#define REQUIRE_SV(CTX)             \
    do {                            \
        if (unlikely((CTX)->pr)) {  \
            gen_priv_opc(CTX);      \
            return true;            \
        }                           \
    } while (0)

/*
 * Hypervisor-privileged insn: additionally requires HV state.
 * Bug fix: the 'if' was missing its opening brace, which made
 * 'return true;' unconditional and left the braces unbalanced
 * (a syntax error at every expansion site).
 */
#define REQUIRE_HV(CTX)                             \
    do {                                            \
        if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
            gen_priv_opc(CTX);                      \
            return true;                            \
        }                                           \
    } while (0)
#else
/* User-only emulation: privileged insns always fault. */
#define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
#define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
#endif
6467 
6468 /*
6469  * Helpers for implementing sets of trans_* functions.
6470  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6471  */
6472 #define TRANS(NAME, FUNC, ...) \
6473     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6474     { return FUNC(ctx, a, __VA_ARGS__); }
6475 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
6476     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6477     {                                                          \
6478         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
6479         return FUNC(ctx, a, __VA_ARGS__);                      \
6480     }
6481 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6482     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6483     {                                                          \
6484         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6485         return FUNC(ctx, a, __VA_ARGS__);                      \
6486     }
6487 
6488 #define TRANS64(NAME, FUNC, ...) \
6489     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6490     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
6491 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6492     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6493     {                                                          \
6494         REQUIRE_64BIT(ctx);                                    \
6495         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6496         return FUNC(ctx, a, __VA_ARGS__);                      \
6497     }
6498 
6499 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6500 
6501 
6502 #include "decode-insn32.c.inc"
6503 #include "decode-insn64.c.inc"
6504 #include "power8-pmu-regs.c.inc"
6505 
6506 /*
6507  * Incorporate CIA into the constant when R=1.
6508  * Validate that when R=1, RA=0.
6509  */
6510 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6511 {
6512     d->rt = a->rt;
6513     d->ra = a->ra;
6514     d->si = a->si;
6515     if (a->r) {
6516         if (unlikely(a->ra != 0)) {
6517             gen_invalid(ctx);
6518             return false;
6519         }
6520         d->si += ctx->cia;
6521     }
6522     return true;
6523 }
6524 
6525 #include "translate/fixedpoint-impl.c.inc"
6526 
6527 #include "translate/fp-impl.c.inc"
6528 
6529 #include "translate/vmx-impl.c.inc"
6530 
6531 #include "translate/vsx-impl.c.inc"
6532 
6533 #include "translate/dfp-impl.c.inc"
6534 
6535 #include "translate/spe-impl.c.inc"
6536 
6537 #include "translate/branch-impl.c.inc"
6538 
6539 #include "translate/storage-ctrl-impl.c.inc"
6540 
6541 /* Handles lfdp */
6542 static void gen_dform39(DisasContext *ctx)
6543 {
6544     if ((ctx->opcode & 0x3) == 0) {
6545         if (ctx->insns_flags2 & PPC2_ISA205) {
6546             return gen_lfdp(ctx);
6547         }
6548     }
6549     return gen_invalid(ctx);
6550 }
6551 
6552 /* Handles stfdp */
6553 static void gen_dform3D(DisasContext *ctx)
6554 {
6555     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6556         /* stfdp */
6557         if (ctx->insns_flags2 & PPC2_ISA205) {
6558             return gen_stfdp(ctx);
6559         }
6560     }
6561     return gen_invalid(ctx);
6562 }
6563 
6564 #if defined(TARGET_PPC64)
6565 /* brd */
6566 static void gen_brd(DisasContext *ctx)
6567 {
6568     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6569 }
6570 
6571 /* brw */
6572 static void gen_brw(DisasContext *ctx)
6573 {
6574     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6575     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6576 
6577 }
6578 
6579 /* brh */
6580 static void gen_brh(DisasContext *ctx)
6581 {
6582     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6583     TCGv_i64 t1 = tcg_temp_new_i64();
6584     TCGv_i64 t2 = tcg_temp_new_i64();
6585 
6586     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6587     tcg_gen_and_i64(t2, t1, mask);
6588     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6589     tcg_gen_shli_i64(t1, t1, 8);
6590     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6591 
6592     tcg_temp_free_i64(t1);
6593     tcg_temp_free_i64(t2);
6594 }
6595 #endif
6596 
static opcode_t opcodes[] = {
/* ISA v3.1 byte-reverse insns (brd/brw/brh), 64-bit targets only. */
#if defined(TARGET_PPC64)
GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
#endif
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
#endif
GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
#if defined(TARGET_PPC64)
GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
#endif
/* Rotate and shift insns. */
GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
               PPC_NONE, PPC2_ISA300),
GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
               PPC_NONE, PPC2_ISA300),
#endif
/* handles lfdp, lxsd, lxssp */
GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
/* handles stfdp, stxsd, stxssp */
GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
/* Load/store multiple and string insns. */
GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
/* Storage ordering, reservations and atomics. */
GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
#endif
GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
/* Two 'wait' encodings, gated on different insn-flag words. */
GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT),
GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300),
/* Branch insns. */
GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
/* Interrupt returns, system calls and power-management insns. */
GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
#if !defined(CONFIG_USER_ONLY)
/* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
#endif
/* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
/* Trap insns. */
GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
#if defined(TARGET_PPC64)
GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
#endif
/* CR, MSR, SPR and time-base access. */
GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
#if defined(TARGET_PPC64)
GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
/* Cache management. */
GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
/* Segment register access (32-bit and 64-bit variants). */
GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
#if defined(TARGET_PPC64)
GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
             PPC_SEGMENT_64B),
GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
             PPC_SEGMENT_64B),
#endif
/* TLB management and embedded (4xx/BookE) specials. */
GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
/*
 * XXX Those instructions will need to be handled differently for
 * different ISA versions
 */
GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
               PPC_NONE, PPC2_BOOKE206),
GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000,
               PPC_NONE, PPC2_PRCNTL),
GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
               PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
             PPC_440_SPEC),
/* AltiVec insns still decoded via the legacy table. */
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
              PPC2_ISA300),
GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
               PPC_NONE, PPC2_ISA207S),
GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
               PPC_NONE, PPC2_ISA207S),
#endif

/* Integer add family, expanded from the GEN_INT_ARITH_ADD macros. */
#undef GEN_INT_ARITH_ADD
#undef GEN_INT_ARITH_ADD_CONST
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
                                add_ca, compute_ca, compute_ov)               \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)

/* 32-bit divide/modulo family. */
#undef GEN_INT_ARITH_DIVW
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

/* 64-bit divide/modulo and multiply families. */
#if defined(TARGET_PPC64)
#undef GEN_INT_ARITH_DIVD
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),

GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#undef GEN_INT_ARITH_MUL_HELPER
#define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
#endif

/* Subtract-from family. */
#undef GEN_INT_ARITH_SUBF
#undef GEN_INT_ARITH_SUBF_CONST
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                add_ca, compute_ca, compute_ov)               \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* Logical insns (one- and two-operand forms). */
#undef GEN_LOGICAL1
#undef GEN_LOGICAL2
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif

/* 64-bit rotate insns; R2/R4 expand the encodings split across opc bits. */
#if defined(TARGET_PPC64)
#undef GEN_PPC64_R2
#undef GEN_PPC64_R4
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
             PPC_64B)
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
GEN_PPC64_R4(rldic, 0x1E, 0x04),
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
#endif

/* Indexed loads (byte-reversed and cache-inhibited variants). */
#undef GEN_LDX_E
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),

#if defined(TARGET_PPC64)
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)

/* HV/P7 and later only */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)

/* External PID based load */
#undef GEN_LDEPX
#define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
#endif

/* Indexed stores (byte-reversed and cache-inhibited variants). */
#undef GEN_STX_E
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),

#if defined(TARGET_PPC64)
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)

/* External PID based stores. */
#undef GEN_STEPX
#define GEN_STEPX(name, ldop, opc2, opc3)                                     \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
#if defined(TARGET_PPC64)
GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
#endif

/* Condition register logical insns. */
#undef GEN_CRLOGIC
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),

/* PPC 405 multiply-accumulate insns. */
#undef GEN_MAC_HANDLER
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),

/* Transactional memory insns (PPC2_TM). */
GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
               PPC_NONE, PPC2_TM),

#include "translate/fp-ops.c.inc"

#include "translate/vmx-ops.c.inc"

#include "translate/vsx-ops.c.inc"

#include "translate/spe-ops.c.inc"
};
7102 
7103 /*****************************************************************************/
7104 /* Opcode types */
/*
 * A slot in an opcode table either points directly at an opc_handler_t,
 * or — tagged in the pointer's low bits — at a sub-table of further
 * handlers (see is_indirect_opcode()/ind_table() below).
 */
enum {
    PPC_DIRECT   = 0, /* Opcode routine        */
    PPC_INDIRECT = 1, /* Indirect opcode table */
};

/* Mask covering the tag bits kept in a table entry's low bits. */
#define PPC_OPCODE_MASK 0x3
7111 
7112 static inline int is_indirect_opcode(void *handler)
7113 {
7114     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
7115 }
7116 
7117 static inline opc_handler_t **ind_table(void *handler)
7118 {
7119     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
7120 }
7121 
7122 /* Instruction table creation */
7123 /* Opcodes tables creation */
7124 static void fill_new_table(opc_handler_t **table, int len)
7125 {
7126     int i;
7127 
7128     for (i = 0; i < len; i++) {
7129         table[i] = &invalid_handler;
7130     }
7131 }
7132 
/*
 * Allocate a fresh indirect sub-table, mark every slot invalid, and
 * store it into table[idx] tagged with PPC_INDIRECT in the low bits.
 * Always returns 0: g_new() aborts on allocation failure.
 */
static int create_new_table(opc_handler_t **table, unsigned char idx)
{
    opc_handler_t **tmp;

    tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
    fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
    table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);

    return 0;
}
7143 
7144 static int insert_in_table(opc_handler_t **table, unsigned char idx,
7145                             opc_handler_t *handler)
7146 {
7147     if (table[idx] != &invalid_handler) {
7148         return -1;
7149     }
7150     table[idx] = handler;
7151 
7152     return 0;
7153 }
7154 
7155 static int register_direct_insn(opc_handler_t **ppc_opcodes,
7156                                 unsigned char idx, opc_handler_t *handler)
7157 {
7158     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
7159         printf("*** ERROR: opcode %02x already assigned in main "
7160                "opcode table\n", idx);
7161         return -1;
7162     }
7163 
7164     return 0;
7165 }
7166 
7167 static int register_ind_in_table(opc_handler_t **table,
7168                                  unsigned char idx1, unsigned char idx2,
7169                                  opc_handler_t *handler)
7170 {
7171     if (table[idx1] == &invalid_handler) {
7172         if (create_new_table(table, idx1) < 0) {
7173             printf("*** ERROR: unable to create indirect table "
7174                    "idx=%02x\n", idx1);
7175             return -1;
7176         }
7177     } else {
7178         if (!is_indirect_opcode(table[idx1])) {
7179             printf("*** ERROR: idx %02x already assigned to a direct "
7180                    "opcode\n", idx1);
7181             return -1;
7182         }
7183     }
7184     if (handler != NULL &&
7185         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
7186         printf("*** ERROR: opcode %02x already assigned in "
7187                "opcode table %02x\n", idx2, idx1);
7188         return -1;
7189     }
7190 
7191     return 0;
7192 }
7193 
7194 static int register_ind_insn(opc_handler_t **ppc_opcodes,
7195                              unsigned char idx1, unsigned char idx2,
7196                              opc_handler_t *handler)
7197 {
7198     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
7199 }
7200 
7201 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
7202                                 unsigned char idx1, unsigned char idx2,
7203                                 unsigned char idx3, opc_handler_t *handler)
7204 {
7205     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7206         printf("*** ERROR: unable to join indirect table idx "
7207                "[%02x-%02x]\n", idx1, idx2);
7208         return -1;
7209     }
7210     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
7211                               handler) < 0) {
7212         printf("*** ERROR: unable to insert opcode "
7213                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7214         return -1;
7215     }
7216 
7217     return 0;
7218 }
7219 
/*
 * Register @handler under the four-level opcode path
 * opc1/opc2/opc3/opc4, creating the intermediate indirect tables on
 * demand.  Returns 0 on success, -1 on any conflict or table error.
 */
static int register_trplind_insn(opc_handler_t **ppc_opcodes,
                                 unsigned char idx1, unsigned char idx2,
                                 unsigned char idx3, unsigned char idx4,
                                 opc_handler_t *handler)
{
    opc_handler_t **table;

    /* Ensure the 2nd-level table for idx1 exists (handler == NULL). */
    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    table = ind_table(ppc_opcodes[idx1]);
    /* Ensure the 3rd-level table for idx2 exists. */
    if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
        printf("*** ERROR: unable to join 2nd-level indirect table idx "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }
    table = ind_table(table[idx2]);
    /* Finally install the handler at the idx3/idx4 leaf. */
    if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
        return -1;
    }
    return 0;
}
7246 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7247 {
7248     if (insn->opc2 != 0xFF) {
7249         if (insn->opc3 != 0xFF) {
7250             if (insn->opc4 != 0xFF) {
7251                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7252                                           insn->opc3, insn->opc4,
7253                                           &insn->handler) < 0) {
7254                     return -1;
7255                 }
7256             } else {
7257                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7258                                          insn->opc3, &insn->handler) < 0) {
7259                     return -1;
7260                 }
7261             }
7262         } else {
7263             if (register_ind_insn(ppc_opcodes, insn->opc1,
7264                                   insn->opc2, &insn->handler) < 0) {
7265                 return -1;
7266             }
7267         }
7268     } else {
7269         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7270             return -1;
7271         }
7272     }
7273 
7274     return 0;
7275 }
7276 
7277 static int test_opcode_table(opc_handler_t **table, int len)
7278 {
7279     int i, count, tmp;
7280 
7281     for (i = 0, count = 0; i < len; i++) {
7282         /* Consistency fixup */
7283         if (table[i] == NULL) {
7284             table[i] = &invalid_handler;
7285         }
7286         if (table[i] != &invalid_handler) {
7287             if (is_indirect_opcode(table[i])) {
7288                 tmp = test_opcode_table(ind_table(table[i]),
7289                     PPC_CPU_INDIRECT_OPCODES_LEN);
7290                 if (tmp == 0) {
7291                     free(table[i]);
7292                     table[i] = &invalid_handler;
7293                 } else {
7294                     count++;
7295                 }
7296             } else {
7297                 count++;
7298             }
7299         }
7300     }
7301 
7302     return count;
7303 }
7304 
7305 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7306 {
7307     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7308         printf("*** WARNING: no opcode defined !\n");
7309     }
7310 }
7311 
7312 /*****************************************************************************/
/*
 * Build the per-CPU dispatch tables from the static opcodes[] list,
 * keeping only entries whose type/type2 flags match this CPU model's
 * insns_flags/insns_flags2.  Sets @errp and returns early on a
 * registration conflict.
 */
void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
{
    PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
    opcode_t *opc;

    fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
    for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
        if (((opc->handler.type & pcc->insns_flags) != 0) ||
            ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
            if (register_insn(cpu->opcodes, opc) < 0) {
                error_setg(errp, "ERROR initializing PowerPC instruction "
                           "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
                           opc->opc3);
                return;
            }
        }
    }
    /* Drop any sub-tables left empty by the registration pass. */
    fix_opcode_tables(cpu->opcodes);
    /* The register helpers report conflicts via printf(). */
    fflush(stdout);
    fflush(stderr);
}
7334 
7335 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7336 {
7337     opc_handler_t **table, **table_2;
7338     int i, j, k;
7339 
7340     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7341         if (cpu->opcodes[i] == &invalid_handler) {
7342             continue;
7343         }
7344         if (is_indirect_opcode(cpu->opcodes[i])) {
7345             table = ind_table(cpu->opcodes[i]);
7346             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7347                 if (table[j] == &invalid_handler) {
7348                     continue;
7349                 }
7350                 if (is_indirect_opcode(table[j])) {
7351                     table_2 = ind_table(table[j]);
7352                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7353                         if (table_2[k] != &invalid_handler &&
7354                             is_indirect_opcode(table_2[k])) {
7355                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7356                                                      ~PPC_INDIRECT));
7357                         }
7358                     }
7359                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7360                                              ~PPC_INDIRECT));
7361                 }
7362             }
7363             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7364                 ~PPC_INDIRECT));
7365         }
7366     }
7367 }
7368 
7369 int ppc_fixup_cpu(PowerPCCPU *cpu)
7370 {
7371     CPUPPCState *env = &cpu->env;
7372 
7373     /*
7374      * TCG doesn't (yet) emulate some groups of instructions that are
7375      * implemented on some otherwise supported CPUs (e.g. VSX and
7376      * decimal floating point instructions on POWER7).  We remove
7377      * unsupported instruction groups from the cpu state's instruction
7378      * masks and hope the guest can cope.  For at least the pseries
7379      * machine, the unavailability of these instructions can be
7380      * advertised to the guest via the device tree.
7381      */
7382     if ((env->insns_flags & ~PPC_TCG_INSNS)
7383         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7384         warn_report("Disabling some instructions which are not "
7385                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7386                     env->insns_flags & ~PPC_TCG_INSNS,
7387                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7388     }
7389     env->insns_flags &= PPC_TCG_INSNS;
7390     env->insns_flags2 &= PPC_TCG_INSNS2;
7391     return 0;
7392 }
7393 
/*
 * Decode and emit code for one instruction via the legacy opcode
 * tables (up to four levels: opc1/opc2/opc3/opc4).  Returns false when
 * the opcode is invalid for this CPU or has reserved bits set, true
 * once the handler has been invoked.
 */
static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
{
    opc_handler_t **table, *handler;
    uint32_t inval;

    ctx->opcode = insn;

    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
              ctx->le_mode ? "little" : "big");

    /* Walk the (possibly tagged-indirect) table chain down to a leaf. */
    table = cpu->opcodes;
    handler = table[opc1(insn)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(insn)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(insn)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(insn)];
            }
        }
    }

    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n",
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    /* SPE-typed opcodes with Rc set use the alternate invalid-bits mask. */
    if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                 && Rc(insn))) {
        inval = handler->inval2;
    } else {
        inval = handler->inval1;
    }

    /* Reject encodings with reserved bits set. */
    if (unlikely((insn & inval) != 0)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n", insn & inval,
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    handler->handler(ctx);
    return true;
}
7449 
/* Unpack the TB's hflags into the DisasContext at translation start. */
static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    uint32_t hflags = ctx->base.tb->flags;

    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1; /* no memory access emitted yet */
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model & POWERPC_MMU_64;

    /* Facility-enable bits, also taken from the hflags. */
    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
    ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;

    ctx->singlestep_enabled = 0;
    if ((hflags >> HFLAGS_SE) & 1) {
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        /* Single-stepping: translate one instruction per TB. */
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}
7495 
/* No per-TB setup is needed for PowerPC. */
static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}
7499 
/* Record the instruction's address for restore_state_to_opc(). */
static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
7504 
/*
 * Return true if @insn is the first word of a prefixed (64-bit)
 * instruction: primary opcode 1, defined from Power ISA v3.1 on.
 * NOTE(review): REQUIRE_INSNS_FLAGS2 presumably contains an early
 * "return false" when the CPU lacks ISA310 -- confirm against its
 * definition earlier in this file.
 */
static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}
7510 
/*
 * Translate a single (possibly prefixed) instruction.  Fetches 4 bytes
 * at pc, tries the decodetree decoder first and falls back to the
 * legacy tables; for prefixed instructions (opc1 == 1) a second word is
 * fetched and the 64-bit pair is decoded instead.
 */
static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    /* cia = "current instruction address" for exception reporting. */
    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        /* Prefixed: fetch the suffix word and decode the 64-bit pair. */
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}
7556 
7557 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7558 {
7559     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7560     DisasJumpType is_jmp = ctx->base.is_jmp;
7561     target_ulong nip = ctx->base.pc_next;
7562 
7563     if (is_jmp == DISAS_NORETURN) {
7564         /* We have already exited the TB. */
7565         return;
7566     }
7567 
7568     /* Honor single stepping. */
7569     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7570         && (nip <= 0x100 || nip > 0xf00)) {
7571         switch (is_jmp) {
7572         case DISAS_TOO_MANY:
7573         case DISAS_EXIT_UPDATE:
7574         case DISAS_CHAIN_UPDATE:
7575             gen_update_nip(ctx, nip);
7576             break;
7577         case DISAS_EXIT:
7578         case DISAS_CHAIN:
7579             break;
7580         default:
7581             g_assert_not_reached();
7582         }
7583 
7584         gen_debug_exception(ctx);
7585         return;
7586     }
7587 
7588     switch (is_jmp) {
7589     case DISAS_TOO_MANY:
7590         if (use_goto_tb(ctx, nip)) {
7591             pmu_count_insns(ctx);
7592             tcg_gen_goto_tb(0);
7593             gen_update_nip(ctx, nip);
7594             tcg_gen_exit_tb(ctx->base.tb, 0);
7595             break;
7596         }
7597         /* fall through */
7598     case DISAS_CHAIN_UPDATE:
7599         gen_update_nip(ctx, nip);
7600         /* fall through */
7601     case DISAS_CHAIN:
7602         /*
7603          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7604          * CF_NO_GOTO_PTR is set. Count insns now.
7605          */
7606         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
7607             pmu_count_insns(ctx);
7608         }
7609 
7610         tcg_gen_lookup_and_goto_ptr();
7611         break;
7612 
7613     case DISAS_EXIT_UPDATE:
7614         gen_update_nip(ctx, nip);
7615         /* fall through */
7616     case DISAS_EXIT:
7617         pmu_count_insns(ctx);
7618         tcg_gen_exit_tb(NULL, 0);
7619         break;
7620 
7621     default:
7622         g_assert_not_reached();
7623     }
7624 }
7625 
7626 static void ppc_tr_disas_log(const DisasContextBase *dcbase,
7627                              CPUState *cs, FILE *logfile)
7628 {
7629     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
7630     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
7631 }
7632 
/* Callbacks driving the common translator_loop() for PowerPC. */
static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};
7641 
/* Entry point from the common TCG code: translate one TB. */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
}
7648 
/*
 * Rebuild CPU state at a faulting instruction from the values recorded
 * by ppc_tr_insn_start(): data[0] holds the instruction's address.
 */
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}
7654