xref: /openbmc/qemu/target/ppc/translate.c (revision e025e8f5)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 #include "power8-pmu.h"
40 
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43 
44 #define HELPER_H "helper.h"
45 #include "exec/helper-info.c.inc"
46 #undef  HELPER_H
47 
48 #define CPU_SINGLE_STEP 0x1
49 #define CPU_BRANCH_STEP 0x2
50 
51 /* Include definitions for instruction classes and implementation flags */
52 /* #define PPC_DEBUG_DISAS */
53 
54 #ifdef PPC_DEBUG_DISAS
55 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
56 #else
57 #  define LOG_DISAS(...) do { } while (0)
58 #endif
59 /*****************************************************************************/
60 /* Code translation helpers                                                  */
61 
62 /* global register indexes */
63 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
64                           + 10 * 4 + 22 * 5 /* SPE GPRh */
65                           + 8 * 5           /* CRF */];
66 static TCGv cpu_gpr[32];
67 static TCGv cpu_gprh[32];
68 static TCGv_i32 cpu_crf[8];
69 static TCGv cpu_nip;
70 static TCGv cpu_msr;
71 static TCGv cpu_ctr;
72 static TCGv cpu_lr;
73 #if defined(TARGET_PPC64)
74 static TCGv cpu_cfar;
75 #endif
76 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
77 static TCGv cpu_reserve;
78 static TCGv cpu_reserve_val;
79 static TCGv cpu_reserve_val2;
80 static TCGv cpu_fpscr;
81 static TCGv_i32 cpu_access_type;
82 
83 void ppc_translate_init(void)
84 {
85     int i;
86     char *p;
87     size_t cpu_reg_names_size;
88 
89     p = cpu_reg_names;
90     cpu_reg_names_size = sizeof(cpu_reg_names);
91 
92     for (i = 0; i < 8; i++) {
93         snprintf(p, cpu_reg_names_size, "crf%d", i);
94         cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
95                                             offsetof(CPUPPCState, crf[i]), p);
96         p += 5;
97         cpu_reg_names_size -= 5;
98     }
99 
100     for (i = 0; i < 32; i++) {
101         snprintf(p, cpu_reg_names_size, "r%d", i);
102         cpu_gpr[i] = tcg_global_mem_new(cpu_env,
103                                         offsetof(CPUPPCState, gpr[i]), p);
104         p += (i < 10) ? 3 : 4;
105         cpu_reg_names_size -= (i < 10) ? 3 : 4;
106         snprintf(p, cpu_reg_names_size, "r%dH", i);
107         cpu_gprh[i] = tcg_global_mem_new(cpu_env,
108                                          offsetof(CPUPPCState, gprh[i]), p);
109         p += (i < 10) ? 4 : 5;
110         cpu_reg_names_size -= (i < 10) ? 4 : 5;
111     }
112 
113     cpu_nip = tcg_global_mem_new(cpu_env,
114                                  offsetof(CPUPPCState, nip), "nip");
115 
116     cpu_msr = tcg_global_mem_new(cpu_env,
117                                  offsetof(CPUPPCState, msr), "msr");
118 
119     cpu_ctr = tcg_global_mem_new(cpu_env,
120                                  offsetof(CPUPPCState, ctr), "ctr");
121 
122     cpu_lr = tcg_global_mem_new(cpu_env,
123                                 offsetof(CPUPPCState, lr), "lr");
124 
125 #if defined(TARGET_PPC64)
126     cpu_cfar = tcg_global_mem_new(cpu_env,
127                                   offsetof(CPUPPCState, cfar), "cfar");
128 #endif
129 
130     cpu_xer = tcg_global_mem_new(cpu_env,
131                                  offsetof(CPUPPCState, xer), "xer");
132     cpu_so = tcg_global_mem_new(cpu_env,
133                                 offsetof(CPUPPCState, so), "SO");
134     cpu_ov = tcg_global_mem_new(cpu_env,
135                                 offsetof(CPUPPCState, ov), "OV");
136     cpu_ca = tcg_global_mem_new(cpu_env,
137                                 offsetof(CPUPPCState, ca), "CA");
138     cpu_ov32 = tcg_global_mem_new(cpu_env,
139                                   offsetof(CPUPPCState, ov32), "OV32");
140     cpu_ca32 = tcg_global_mem_new(cpu_env,
141                                   offsetof(CPUPPCState, ca32), "CA32");
142 
143     cpu_reserve = tcg_global_mem_new(cpu_env,
144                                      offsetof(CPUPPCState, reserve_addr),
145                                      "reserve_addr");
146     cpu_reserve_val = tcg_global_mem_new(cpu_env,
147                                          offsetof(CPUPPCState, reserve_val),
148                                          "reserve_val");
149     cpu_reserve_val2 = tcg_global_mem_new(cpu_env,
150                                           offsetof(CPUPPCState, reserve_val2),
151                                           "reserve_val2");
152 
153     cpu_fpscr = tcg_global_mem_new(cpu_env,
154                                    offsetof(CPUPPCState, fpscr), "fpscr");
155 
156     cpu_access_type = tcg_global_mem_new_i32(cpu_env,
157                                              offsetof(CPUPPCState, access_type),
158                                              "access_type");
159 }
160 
161 /* internal defines */
162 struct DisasContext {
163     DisasContextBase base;
164     target_ulong cia;  /* current instruction address */
165     uint32_t opcode;
166     /* Execution mode bits taken from the MSR */
167     bool pr, hv, dr, le_mode;
168     bool lazy_tlb_flush;
169     bool need_access_type;
170     int mem_idx;
171     int access_type;
172     /* Translation flags */
173     MemOp default_tcg_memop_mask;
174 #if defined(TARGET_PPC64)
175     bool sf_mode;
176     bool has_cfar;
177 #endif
178     bool fpu_enabled;
179     bool altivec_enabled;
180     bool vsx_enabled;
181     bool spe_enabled;
182     bool tm_enabled;
183     bool gtse;
184     bool hr;
185     bool mmcr0_pmcc0;
186     bool mmcr0_pmcc1;
187     bool mmcr0_pmcjce;
188     bool pmc_other;
189     bool pmu_insn_cnt;
190     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
191     int singlestep_enabled;
192     uint32_t flags;
193     uint64_t insns_flags;
194     uint64_t insns_flags2;
195 };
196 
197 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
198 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
199 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
200 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
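
/*
 * Added note (not in the original source): the four codes above encode two
 * independent choices a handler makes when it ends a translation block:
 * whether to exit to the main loop (DISAS_EXIT*) or look up the next TB
 * directly (DISAS_CHAIN*), and whether cpu_nip already holds the next
 * instruction address ("pc updated") or still has to be written, typically
 * from ctx->base.pc_next in the tb_stop hook ("pc stale").
 */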
201 
202 /* Return true iff byteswap is needed in a scalar memop */
203 static inline bool need_byteswap(const DisasContext *ctx)
204 {
205 #if TARGET_BIG_ENDIAN
206      return ctx->le_mode;
207 #else
208      return !ctx->le_mode;
209 #endif
210 }
211 
212 /* True when active word size < size of target_long.  */
213 #ifdef TARGET_PPC64
214 # define NARROW_MODE(C)  (!(C)->sf_mode)
215 #else
216 # define NARROW_MODE(C)  0
217 #endif
218 
219 struct opc_handler_t {
220     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
221     uint32_t inval1;
222     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
223     uint32_t inval2;
224     /* instruction type */
225     uint64_t type;
226     /* extended instruction type */
227     uint64_t type2;
228     /* handler */
229     void (*handler)(DisasContext *ctx);
230 };
231 
232 /* SPR load/store helpers */
233 static inline void gen_load_spr(TCGv t, int reg)
234 {
235     tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
236 }
237 
238 static inline void gen_store_spr(int reg, TCGv t)
239 {
240     tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
241 }
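
/*
 * Usage sketch (added for illustration): a minimal mfspr/mtspr callback pair
 * does nothing more than move data between env->spr[sprn] and a GPR:
 *
 *     gen_load_spr(cpu_gpr[gprn], sprn);    // read side (mfspr)
 *     gen_store_spr(sprn, cpu_gpr[gprn]);   // write side (mtspr)
 *
 * which is exactly what spr_read_generic()/spr_write_generic() below do.
 */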
242 
243 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
244 {
245     if (ctx->need_access_type && ctx->access_type != access_type) {
246         tcg_gen_movi_i32(cpu_access_type, access_type);
247         ctx->access_type = access_type;
248     }
249 }
250 
251 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
252 {
253     if (NARROW_MODE(ctx)) {
254         nip = (uint32_t)nip;
255     }
256     tcg_gen_movi_tl(cpu_nip, nip);
257 }
258 
259 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
260 {
261     TCGv_i32 t0, t1;
262 
263     /*
264      * These are all synchronous exceptions; we set the PC back to the
265      * faulting instruction.
266      */
267     gen_update_nip(ctx, ctx->cia);
268     t0 = tcg_constant_i32(excp);
269     t1 = tcg_constant_i32(error);
270     gen_helper_raise_exception_err(cpu_env, t0, t1);
271     ctx->base.is_jmp = DISAS_NORETURN;
272 }
273 
274 static void gen_exception(DisasContext *ctx, uint32_t excp)
275 {
276     TCGv_i32 t0;
277 
278     /*
279      * These are all synchronous exceptions; we set the PC back to the
280      * faulting instruction.
281      */
282     gen_update_nip(ctx, ctx->cia);
283     t0 = tcg_constant_i32(excp);
284     gen_helper_raise_exception(cpu_env, t0);
285     ctx->base.is_jmp = DISAS_NORETURN;
286 }
287 
288 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
289                               target_ulong nip)
290 {
291     TCGv_i32 t0;
292 
293     gen_update_nip(ctx, nip);
294     t0 = tcg_constant_i32(excp);
295     gen_helper_raise_exception(cpu_env, t0);
296     ctx->base.is_jmp = DISAS_NORETURN;
297 }
298 
299 #if !defined(CONFIG_USER_ONLY)
300 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
301 {
302     translator_io_start(&ctx->base);
303     gen_helper_ppc_maybe_interrupt(cpu_env);
304 }
305 #endif
306 
307 /*
308  * Tells the caller which exception is appropriate to generate and prepares
309  * the SPR registers for that exception.
310  *
311  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
312  * POWERPC_EXCP_DEBUG (on BookE).
313  */
314 static uint32_t gen_prep_dbgex(DisasContext *ctx)
315 {
316     if (ctx->flags & POWERPC_FLAG_DE) {
317         target_ulong dbsr = 0;
318         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
319             dbsr = DBCR0_ICMP;
320         } else {
321             /* Must have been a branch */
322             dbsr = DBCR0_BRT;
323         }
324         TCGv t0 = tcg_temp_new();
325         gen_load_spr(t0, SPR_BOOKE_DBSR);
326         tcg_gen_ori_tl(t0, t0, dbsr);
327         gen_store_spr(SPR_BOOKE_DBSR, t0);
328         return POWERPC_EXCP_DEBUG;
329     } else {
330         return POWERPC_EXCP_TRACE;
331     }
332 }
333 
334 static void gen_debug_exception(DisasContext *ctx)
335 {
336     gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
337     ctx->base.is_jmp = DISAS_NORETURN;
338 }
339 
340 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
341 {
342     /* Will be converted to program check if needed */
343     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
344 }
345 
346 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
347 {
348     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
349 }
350 
351 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
352 {
353     /* Will be converted to program check if needed */
354     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
355 }
356 
357 /*****************************************************************************/
358 /* SPR READ/WRITE CALLBACKS */
359 
360 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
361 {
362 #if 0
363     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
364     printf("ERROR: attempt to access SPR %d!\n", sprn);
365 #endif
366 }
367 
368 /* #define PPC_DUMP_SPR_ACCESSES */
369 
370 /*
371  * Generic callbacks:
372  * do nothing but store/retrieve the SPR value
373  */
374 static void spr_load_dump_spr(int sprn)
375 {
376 #ifdef PPC_DUMP_SPR_ACCESSES
377     TCGv_i32 t0 = tcg_constant_i32(sprn);
378     gen_helper_load_dump_spr(cpu_env, t0);
379 #endif
380 }
381 
382 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
383 {
384     gen_load_spr(cpu_gpr[gprn], sprn);
385     spr_load_dump_spr(sprn);
386 }
387 
388 static void spr_store_dump_spr(int sprn)
389 {
390 #ifdef PPC_DUMP_SPR_ACCESSES
391     TCGv_i32 t0 = tcg_constant_i32(sprn);
392     gen_helper_store_dump_spr(cpu_env, t0);
393 #endif
394 }
395 
396 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
397 {
398     gen_store_spr(sprn, cpu_gpr[gprn]);
399     spr_store_dump_spr(sprn);
400 }
401 
402 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
403 {
404 #ifdef TARGET_PPC64
405     TCGv t0 = tcg_temp_new();
406     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
407     gen_store_spr(sprn, t0);
408     spr_store_dump_spr(sprn);
409 #else
410     spr_write_generic(ctx, sprn, gprn);
411 #endif
412 }
413 
414 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
415 {
416     spr_write_generic32(ctx, sprn, gprn);
417 
418     /*
419      * SPR_CTRL writes must force a new translation block,
420      * allowing the PMU to calculate the run latch events with
421      * more accuracy.
422      */
423     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
424 }
425 
426 #if !defined(CONFIG_USER_ONLY)
427 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
428 {
429     TCGv t0 = tcg_temp_new();
430     TCGv t1 = tcg_temp_new();
431     gen_load_spr(t0, sprn);
432     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
433     tcg_gen_and_tl(t0, t0, t1);
434     gen_store_spr(sprn, t0);
435 }
436 
437 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
438 {
439 }
440 
441 #endif
442 
443 /* SPR common to all PowerPC */
444 /* XER */
445 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
446 {
447     TCGv dst = cpu_gpr[gprn];
448     TCGv t0 = tcg_temp_new();
449     TCGv t1 = tcg_temp_new();
450     TCGv t2 = tcg_temp_new();
451     tcg_gen_mov_tl(dst, cpu_xer);
452     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
453     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
454     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
455     tcg_gen_or_tl(t0, t0, t1);
456     tcg_gen_or_tl(dst, dst, t2);
457     tcg_gen_or_tl(dst, dst, t0);
458     if (is_isa300(ctx)) {
459         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
460         tcg_gen_or_tl(dst, dst, t0);
461         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
462         tcg_gen_or_tl(dst, dst, t0);
463     }
464 }
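
/*
 * Equivalent formulation (added comment): the read reassembles the
 * architected register from its split representation, roughly
 *
 *     XER = env->xer
 *         | (SO << XER_SO) | (OV << XER_OV) | (CA << XER_CA)
 *         | (OV32 << XER_OV32) | (CA32 << XER_CA32)    (ISA v3.00 only)
 *
 * since cpu_xer only keeps the bits that are not tracked separately.
 */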
465 
466 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
467 {
468     TCGv src = cpu_gpr[gprn];
469     /* Write all flags; the isa300 check is only done when reading them back */
470     tcg_gen_andi_tl(cpu_xer, src,
471                     ~((1u << XER_SO) |
472                       (1u << XER_OV) | (1u << XER_OV32) |
473                       (1u << XER_CA) | (1u << XER_CA32)));
474     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
475     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
476     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
477     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
478     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
479 }
480 
481 /* LR */
482 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
483 {
484     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
485 }
486 
487 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
488 {
489     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
490 }
491 
492 /* CFAR */
493 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
494 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
495 {
496     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
497 }
498 
499 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
500 {
501     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
502 }
503 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
504 
505 /* CTR */
506 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
507 {
508     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
509 }
510 
511 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
512 {
513     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
514 }
515 
516 /* User read access to SPR */
517 /* USPRx */
518 /* UMMCRx */
519 /* UPMCx */
520 /* USIA */
521 /* UDECR */
522 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
523 {
524     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
525 }
526 
527 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
528 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
529 {
530     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
531 }
532 #endif
533 
534 /* SPR common to all non-embedded PowerPC */
535 /* DECR */
536 #if !defined(CONFIG_USER_ONLY)
537 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
538 {
539     translator_io_start(&ctx->base);
540     gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
541 }
542 
543 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
544 {
545     translator_io_start(&ctx->base);
546     gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
547 }
548 #endif
549 
550 /* SPR common to all non-embedded PowerPC, except 601 */
551 /* Time base */
552 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
553 {
554     translator_io_start(&ctx->base);
555     gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
556 }
557 
558 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
559 {
560     translator_io_start(&ctx->base);
561     gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
562 }
563 
564 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
565 {
566     gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
567 }
568 
569 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
570 {
571     gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
572 }
573 
574 #if !defined(CONFIG_USER_ONLY)
575 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
576 {
577     translator_io_start(&ctx->base);
578     gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
579 }
580 
581 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
582 {
583     translator_io_start(&ctx->base);
584     gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
585 }
586 
587 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
588 {
589     gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
590 }
591 
592 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
593 {
594     gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
595 }
596 
597 #if defined(TARGET_PPC64)
598 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
599 {
600     translator_io_start(&ctx->base);
601     gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
602 }
603 
604 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
605 {
606     translator_io_start(&ctx->base);
607     gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
608 }
609 
610 /* HDECR */
611 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
612 {
613     translator_io_start(&ctx->base);
614     gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
615 }
616 
617 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
618 {
619     translator_io_start(&ctx->base);
620     gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
621 }
622 
623 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
624 {
625     translator_io_start(&ctx->base);
626     gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
627 }
628 
629 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
630 {
631     translator_io_start(&ctx->base);
632     gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
633 }
634 
635 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
636 {
637     translator_io_start(&ctx->base);
638     gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
639 }
640 
641 #endif
642 #endif
643 
644 #if !defined(CONFIG_USER_ONLY)
645 /* IBAT0U...IBAT7U */
646 /* IBAT0L...IBAT7L */
647 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
648 {
649     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
650                   offsetof(CPUPPCState,
651                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
652 }
653 
654 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
655 {
656     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
657                   offsetof(CPUPPCState,
658                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
659 }
660 
661 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
662 {
663     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0U) / 2);
664     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
665 }
666 
667 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
668 {
669     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4U) / 2) + 4);
670     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
671 }
672 
673 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
674 {
675     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0L) / 2);
676     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
677 }
678 
679 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
680 {
681     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4L) / 2) + 4);
682     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
683 }
684 
685 /* DBAT0U...DBAT7U */
686 /* DBAT0L...DBAT7L */
687 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
688 {
689     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
690                   offsetof(CPUPPCState,
691                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
692 }
693 
694 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
695 {
696     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
697                   offsetof(CPUPPCState,
698                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
699 }
700 
701 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
702 {
703     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0U) / 2);
704     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
705 }
706 
707 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
708 {
709     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4U) / 2) + 4);
710     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
711 }
712 
713 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
714 {
715     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0L) / 2);
716     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
717 }
718 
719 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
720 {
721     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4L) / 2) + 4);
722     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
723 }
724 
725 /* SDR1 */
726 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
727 {
728     gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
729 }
730 
731 #if defined(TARGET_PPC64)
732 /* 64-bit PowerPC specific SPRs */
733 /* PIDR */
734 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
735 {
736     gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
737 }
738 
739 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
740 {
741     gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
742 }
743 
744 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
745 {
746     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
747 }
748 
749 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
750 {
751     TCGv t0 = tcg_temp_new();
752     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
753     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
754 }

755 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
756 {
757     gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
758 }
759 
760 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
761 {
762     gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
763 }
764 
765 /* DPDES */
766 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
767 {
768     gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
769 }
770 
771 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
772 {
773     gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
774 }
775 #endif
776 #endif
777 
778 /* PowerPC 40x specific registers */
779 #if !defined(CONFIG_USER_ONLY)
780 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
781 {
782     translator_io_start(&ctx->base);
783     gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
784 }
785 
786 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
787 {
788     translator_io_start(&ctx->base);
789     gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
790 }
791 
792 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
793 {
794     translator_io_start(&ctx->base);
795     gen_store_spr(sprn, cpu_gpr[gprn]);
796     gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
797     /* We must stop translation as we may have rebooted */
798     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
799 }
800 
801 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
802 {
803     translator_io_start(&ctx->base);
804     gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
805 }
806 
807 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
808 {
809     translator_io_start(&ctx->base);
810     gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
811 }
812 
813 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
814 {
815     translator_io_start(&ctx->base);
816     gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
817 }
818 
819 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
820 {
821     TCGv t0 = tcg_temp_new();
822     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
823     gen_helper_store_40x_pid(cpu_env, t0);
824 }
825 
826 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
827 {
828     translator_io_start(&ctx->base);
829     gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
830 }
831 
832 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
833 {
834     translator_io_start(&ctx->base);
835     gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
836 }
837 #endif
838 
839 /* PIR */
840 #if !defined(CONFIG_USER_ONLY)
841 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
842 {
843     TCGv t0 = tcg_temp_new();
844     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
845     gen_store_spr(SPR_PIR, t0);
846 }
847 #endif
848 
849 /* SPE specific registers */
850 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
851 {
852     TCGv_i32 t0 = tcg_temp_new_i32();
853     tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
854     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
855 }
856 
857 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
858 {
859     TCGv_i32 t0 = tcg_temp_new_i32();
860     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
861     tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
862 }
863 
864 #if !defined(CONFIG_USER_ONLY)
865 /* Callback used to write the exception vector base */
866 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
867 {
868     TCGv t0 = tcg_temp_new();
869     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
870     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
871     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
872     gen_store_spr(sprn, t0);
873 }
874 
875 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
876 {
877     int sprn_offs;
878 
879     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
880         sprn_offs = sprn - SPR_BOOKE_IVOR0;
881     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
882         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
883     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
884         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
885     } else {
886         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
887                       " vector 0x%03x\n", sprn);
888         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
889         return;
890     }
891 
892     TCGv t0 = tcg_temp_new();
893     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
894     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
895     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
896     gen_store_spr(sprn, t0);
897 }
898 #endif
899 
900 #ifdef TARGET_PPC64
901 #ifndef CONFIG_USER_ONLY
902 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
903 {
904     TCGv t0 = tcg_temp_new();
905     TCGv t1 = tcg_temp_new();
906     TCGv t2 = tcg_temp_new();
907 
908     /*
909      * Note, the HV=1 PR=0 case is handled earlier by simply using
910      * spr_write_generic for HV mode in the SPR table
911      */
912 
913     /* Build insertion mask into t1 based on context */
914     if (ctx->pr) {
915         gen_load_spr(t1, SPR_UAMOR);
916     } else {
917         gen_load_spr(t1, SPR_AMOR);
918     }
919 
920     /* Mask new bits into t2 */
921     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
922 
923     /* Load AMR and clear new bits in t0 */
924     gen_load_spr(t0, SPR_AMR);
925     tcg_gen_andc_tl(t0, t0, t1);
926 
927     /* OR in the new bits and write it out */
928     tcg_gen_or_tl(t0, t0, t2);
929     gen_store_spr(SPR_AMR, t0);
930     spr_store_dump_spr(SPR_AMR);
931 }
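
/*
 * Worked form of the above (added comment): with mask = UAMOR in problem
 * state and mask = AMOR otherwise, the store performed here is
 *
 *     AMR = (AMR & ~mask) | (GPR[gprn] & mask);
 *
 * i.e. only the bits permitted by the relevant authority mask are replaced.
 */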
932 
933 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
934 {
935     TCGv t0 = tcg_temp_new();
936     TCGv t1 = tcg_temp_new();
937     TCGv t2 = tcg_temp_new();
938 
939     /*
940      * Note, the HV=1 case is handled earlier by simply using
941      * spr_write_generic for HV mode in the SPR table
942      */
943 
944     /* Build insertion mask into t1 based on context */
945     gen_load_spr(t1, SPR_AMOR);
946 
947     /* Mask new bits into t2 */
948     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
949 
950     /* Load UAMOR and clear new bits in t0 */
951     gen_load_spr(t0, SPR_UAMOR);
952     tcg_gen_andc_tl(t0, t0, t1);
953 
954     /* OR in the new bits and write it out */
955     tcg_gen_or_tl(t0, t0, t2);
956     gen_store_spr(SPR_UAMOR, t0);
957     spr_store_dump_spr(SPR_UAMOR);
958 }
959 
960 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
961 {
962     TCGv t0 = tcg_temp_new();
963     TCGv t1 = tcg_temp_new();
964     TCGv t2 = tcg_temp_new();
965 
966     /*
967      * Note, the HV=1 case is handled earlier by simply using
968      * spr_write_generic for HV mode in the SPR table
969      */
970 
971     /* Build insertion mask into t1 based on context */
972     gen_load_spr(t1, SPR_AMOR);
973 
974     /* Mask new bits into t2 */
975     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
976 
977     /* Load IAMR and clear new bits in t0 */
978     gen_load_spr(t0, SPR_IAMR);
979     tcg_gen_andc_tl(t0, t0, t1);
980 
981     /* OR in the new bits and write it out */
982     tcg_gen_or_tl(t0, t0, t2);
983     gen_store_spr(SPR_IAMR, t0);
984     spr_store_dump_spr(SPR_IAMR);
985 }
986 #endif
987 #endif
988 
989 #ifndef CONFIG_USER_ONLY
990 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
991 {
992     gen_helper_fixup_thrm(cpu_env);
993     gen_load_spr(cpu_gpr[gprn], sprn);
994     spr_load_dump_spr(sprn);
995 }
996 #endif /* !CONFIG_USER_ONLY */
997 
998 #if !defined(CONFIG_USER_ONLY)
999 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1000 {
1001     TCGv t0 = tcg_temp_new();
1002 
1003     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1004     gen_store_spr(sprn, t0);
1005 }
1006 
1007 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1008 {
1009     TCGv t0 = tcg_temp_new();
1010 
1011     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1012     gen_store_spr(sprn, t0);
1013 }
1014 
1015 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1016 {
1017     TCGv t0 = tcg_temp_new();
1018 
1019     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1020                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1021     gen_store_spr(sprn, t0);
1022 }
1023 
1024 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1025 {
1026     gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1027 }
1028 
1029 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1030 {
1031     TCGv_i32 t0 = tcg_constant_i32(sprn);
1032     gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1033 }
1034 
1035 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1036 {
1037     gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1038 }
1039 
1040 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1041 {
1042     gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1043 }
1044 
1045 #endif
1046 
1047 #if !defined(CONFIG_USER_ONLY)
1048 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1049 {
1050     TCGv val = tcg_temp_new();
1051     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1052     gen_store_spr(SPR_BOOKE_MAS3, val);
1053     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1054     gen_store_spr(SPR_BOOKE_MAS7, val);
1055 }
1056 
1057 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1058 {
1059     TCGv mas7 = tcg_temp_new();
1060     TCGv mas3 = tcg_temp_new();
1061     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1062     tcg_gen_shli_tl(mas7, mas7, 32);
1063     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1064     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1065 }
1066 
1067 #endif
1068 
1069 #ifdef TARGET_PPC64
1070 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1071                                     int bit, int sprn, int cause)
1072 {
1073     TCGv_i32 t1 = tcg_constant_i32(bit);
1074     TCGv_i32 t2 = tcg_constant_i32(sprn);
1075     TCGv_i32 t3 = tcg_constant_i32(cause);
1076 
1077     gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1078 }
1079 
1080 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1081                                    int bit, int sprn, int cause)
1082 {
1083     TCGv_i32 t1 = tcg_constant_i32(bit);
1084     TCGv_i32 t2 = tcg_constant_i32(sprn);
1085     TCGv_i32 t3 = tcg_constant_i32(cause);
1086 
1087     gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1088 }
1089 
1090 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1091 {
1092     TCGv spr_up = tcg_temp_new();
1093     TCGv spr = tcg_temp_new();
1094 
1095     gen_load_spr(spr, sprn - 1);
1096     tcg_gen_shri_tl(spr_up, spr, 32);
1097     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1098 }
1099 
1100 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1101 {
1102     TCGv spr = tcg_temp_new();
1103 
1104     gen_load_spr(spr, sprn - 1);
1105     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1106     gen_store_spr(sprn - 1, spr);
1107 }
1108 
1109 #if !defined(CONFIG_USER_ONLY)
1110 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1111 {
1112     TCGv hmer = tcg_temp_new();
1113 
1114     gen_load_spr(hmer, sprn);
1115     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1116     gen_store_spr(sprn, hmer);
1117     spr_store_dump_spr(sprn);
1118 }
1119 
1120 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1121 {
1122     gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1123 }
1124 #endif /* !defined(CONFIG_USER_ONLY) */
1125 
1126 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1127 {
1128     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1129     spr_read_generic(ctx, gprn, sprn);
1130 }
1131 
1132 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1133 {
1134     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1135     spr_write_generic(ctx, sprn, gprn);
1136 }
1137 
1138 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1139 {
1140     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1141     spr_read_generic(ctx, gprn, sprn);
1142 }
1143 
1144 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1145 {
1146     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1147     spr_write_generic(ctx, sprn, gprn);
1148 }
1149 
1150 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1151 {
1152     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1153     spr_read_prev_upper32(ctx, gprn, sprn);
1154 }
1155 
1156 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1157 {
1158     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1159     spr_write_prev_upper32(ctx, sprn, gprn);
1160 }
1161 
1162 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1163 {
1164     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1165     spr_read_generic(ctx, gprn, sprn);
1166 }
1167 
1168 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1169 {
1170     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1171     spr_write_generic(ctx, sprn, gprn);
1172 }
1173 
1174 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1175 {
1176     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1177     spr_read_prev_upper32(ctx, gprn, sprn);
1178 }
1179 
1180 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1181 {
1182     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1183     spr_write_prev_upper32(ctx, sprn, gprn);
1184 }
1185 
1186 void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1187 {
1188     TCGv t0 = tcg_temp_new();
1189 
1190     /*
1191      * Access to the (H)DEXCR in problem state is done using separate
1192      * SPR indexes which are 16 below the SPR indexes that have full
1193      * access to the (H)DEXCR in privileged state. Problem state can
1194      * only read bits 32:63; bits 0:31 return 0.
1195      *
1196      * See section 9.3.1-9.3.2 of PowerISA v3.1B
1197      */
1198 
1199     gen_load_spr(t0, sprn + 16);
1200     tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1201 }
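
/*
 * Illustration (added comment): a problem-state read of the alias at SPR n
 * therefore returns the low 32 bits of the privileged register kept at
 * SPR n + 16, with the upper half of the destination GPR forced to zero,
 * matching the "bits 32:63 only" rule quoted above.
 */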
1202 #endif
1203 
1204 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1205 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1206 
1207 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1208 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1209 
1210 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1211 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1212 
1213 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1214 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1215 
1216 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1217 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1218 
1219 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1220 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1221 
1222 typedef struct opcode_t {
1223     unsigned char opc1, opc2, opc3, opc4;
1224 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1225     unsigned char pad[4];
1226 #endif
1227     opc_handler_t handler;
1228     const char *oname;
1229 } opcode_t;
1230 
1231 static void gen_priv_opc(DisasContext *ctx)
1232 {
1233     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1234 }
1235 
1236 /* Helpers for priv. check */
1237 #define GEN_PRIV(CTX)              \
1238     do {                           \
1239         gen_priv_opc(CTX); return; \
1240     } while (0)
1241 
1242 #if defined(CONFIG_USER_ONLY)
1243 #define CHK_HV(CTX) GEN_PRIV(CTX)
1244 #define CHK_SV(CTX) GEN_PRIV(CTX)
1245 #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1246 #else
1247 #define CHK_HV(CTX)                         \
1248     do {                                    \
1249         if (unlikely(ctx->pr || !ctx->hv)) {\
1250             GEN_PRIV(CTX);                  \
1251         }                                   \
1252     } while (0)
1253 #define CHK_SV(CTX)              \
1254     do {                         \
1255         if (unlikely(ctx->pr)) { \
1256             GEN_PRIV(CTX);       \
1257         }                        \
1258     } while (0)
1259 #define CHK_HVRM(CTX)                                   \
1260     do {                                                \
1261         if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1262             GEN_PRIV(CTX);                              \
1263         }                                               \
1264     } while (0)
1265 #endif
1266 
1267 #define CHK_NONE(CTX)
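
/*
 * Usage sketch (added comment, hypothetical handler name): a privileged
 * instruction handler typically starts with one of these checks, e.g.
 *
 *     static void gen_foo(DisasContext *ctx)
 *     {
 *         CHK_SV(ctx);   // privilege exception + return when ctx->pr is set
 *         ... emit the privileged operation ...
 *     }
 *
 * CHK_HV additionally requires hypervisor state, CHK_HVRM also requires
 * real mode (ctx->dr clear), and in user-only builds all three reduce to
 * GEN_PRIV.
 */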
1268 
1269 /*****************************************************************************/
1270 /* PowerPC instructions table                                                */
1271 
1272 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1273 {                                                                             \
1274     .opc1 = op1,                                                              \
1275     .opc2 = op2,                                                              \
1276     .opc3 = op3,                                                              \
1277     .opc4 = 0xff,                                                             \
1278     .handler = {                                                              \
1279         .inval1  = invl,                                                      \
1280         .type = _typ,                                                         \
1281         .type2 = _typ2,                                                       \
1282         .handler = &gen_##name,                                               \
1283     },                                                                        \
1284     .oname = stringify(name),                                                 \
1285 }
1286 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1287 {                                                                             \
1288     .opc1 = op1,                                                              \
1289     .opc2 = op2,                                                              \
1290     .opc3 = op3,                                                              \
1291     .opc4 = 0xff,                                                             \
1292     .handler = {                                                              \
1293         .inval1  = invl1,                                                     \
1294         .inval2  = invl2,                                                     \
1295         .type = _typ,                                                         \
1296         .type2 = _typ2,                                                       \
1297         .handler = &gen_##name,                                               \
1298     },                                                                        \
1299     .oname = stringify(name),                                                 \
1300 }
1301 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1302 {                                                                             \
1303     .opc1 = op1,                                                              \
1304     .opc2 = op2,                                                              \
1305     .opc3 = op3,                                                              \
1306     .opc4 = 0xff,                                                             \
1307     .handler = {                                                              \
1308         .inval1  = invl,                                                      \
1309         .type = _typ,                                                         \
1310         .type2 = _typ2,                                                       \
1311         .handler = &gen_##name,                                               \
1312     },                                                                        \
1313     .oname = onam,                                                            \
1314 }
1315 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1316 {                                                                             \
1317     .opc1 = op1,                                                              \
1318     .opc2 = op2,                                                              \
1319     .opc3 = op3,                                                              \
1320     .opc4 = op4,                                                              \
1321     .handler = {                                                              \
1322         .inval1  = invl,                                                      \
1323         .type = _typ,                                                         \
1324         .type2 = _typ2,                                                       \
1325         .handler = &gen_##name,                                               \
1326     },                                                                        \
1327     .oname = stringify(name),                                                 \
1328 }
1329 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1330 {                                                                             \
1331     .opc1 = op1,                                                              \
1332     .opc2 = op2,                                                              \
1333     .opc3 = op3,                                                              \
1334     .opc4 = op4,                                                              \
1335     .handler = {                                                              \
1336         .inval1  = invl,                                                      \
1337         .type = _typ,                                                         \
1338         .type2 = _typ2,                                                       \
1339         .handler = &gen_##name,                                               \
1340     },                                                                        \
1341     .oname = onam,                                                            \
1342 }
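
/*
 * Expansion example (added comment, hypothetical opcode "foo"):
 *
 *     GEN_HANDLER(foo, 0x1F, 0x00, 0x00, 0x00000000, PPC_INTEGER)
 *
 * produces the initializer
 *
 *     { .opc1 = 0x1F, .opc2 = 0x00, .opc3 = 0x00, .opc4 = 0xff,
 *       .handler = { .inval1 = 0x00000000, .type = PPC_INTEGER,
 *                    .type2 = PPC_NONE, .handler = &gen_foo },
 *       .oname = "foo" }
 *
 * so the opcode table further down is simply an array of such records
 * keyed by the primary/extended opcode fields.
 */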
1343 
1344 /* Invalid instruction */
1345 static void gen_invalid(DisasContext *ctx)
1346 {
1347     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1348 }
1349 
1350 static opc_handler_t invalid_handler = {
1351     .inval1  = 0xFFFFFFFF,
1352     .inval2  = 0xFFFFFFFF,
1353     .type    = PPC_NONE,
1354     .type2   = PPC_NONE,
1355     .handler = gen_invalid,
1356 };
1357 
1358 /***                           Integer comparison                          ***/
1359 
1360 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1361 {
1362     TCGv t0 = tcg_temp_new();
1363     TCGv t1 = tcg_temp_new();
1364     TCGv_i32 t = tcg_temp_new_i32();
1365 
1366     tcg_gen_movi_tl(t0, CRF_EQ);
1367     tcg_gen_movi_tl(t1, CRF_LT);
1368     tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1369                        t0, arg0, arg1, t1, t0);
1370     tcg_gen_movi_tl(t1, CRF_GT);
1371     tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1372                        t0, arg0, arg1, t1, t0);
1373 
1374     tcg_gen_trunc_tl_i32(t, t0);
1375     tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1376     tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1377 }
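
/*
 * Worked example (added comment): for a signed compare with arg0 < arg1 the
 * movcond chain replaces the initial CRF_EQ in t0 with CRF_LT (and with
 * CRF_GT in the opposite case), so the destination field becomes
 *
 *     CR[crf] = (LT, GT or EQ bit) | SO
 *
 * mirroring the architected behaviour of copying XER[SO] into the SO bit
 * of the target CR field.
 */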
1378 
1379 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1380 {
1381     TCGv t0 = tcg_constant_tl(arg1);
1382     gen_op_cmp(arg0, t0, s, crf);
1383 }
1384 
1385 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1386 {
1387     TCGv t0, t1;
1388     t0 = tcg_temp_new();
1389     t1 = tcg_temp_new();
1390     if (s) {
1391         tcg_gen_ext32s_tl(t0, arg0);
1392         tcg_gen_ext32s_tl(t1, arg1);
1393     } else {
1394         tcg_gen_ext32u_tl(t0, arg0);
1395         tcg_gen_ext32u_tl(t1, arg1);
1396     }
1397     gen_op_cmp(t0, t1, s, crf);
1398 }
1399 
1400 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1401 {
1402     TCGv t0 = tcg_constant_tl(arg1);
1403     gen_op_cmp32(arg0, t0, s, crf);
1404 }
1405 
1406 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1407 {
1408     if (NARROW_MODE(ctx)) {
1409         gen_op_cmpi32(reg, 0, 1, 0);
1410     } else {
1411         gen_op_cmpi(reg, 0, 1, 0);
1412     }
1413 }
1414 
1415 /* cmprb - range comparison: isupper, isalpha, islower */
1416 static void gen_cmprb(DisasContext *ctx)
1417 {
1418     TCGv_i32 src1 = tcg_temp_new_i32();
1419     TCGv_i32 src2 = tcg_temp_new_i32();
1420     TCGv_i32 src2lo = tcg_temp_new_i32();
1421     TCGv_i32 src2hi = tcg_temp_new_i32();
1422     TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1423 
1424     tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1425     tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1426 
1427     tcg_gen_andi_i32(src1, src1, 0xFF);
1428     tcg_gen_ext8u_i32(src2lo, src2);
1429     tcg_gen_shri_i32(src2, src2, 8);
1430     tcg_gen_ext8u_i32(src2hi, src2);
1431 
1432     tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1433     tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1434     tcg_gen_and_i32(crf, src2lo, src2hi);
1435 
1436     if (ctx->opcode & 0x00200000) {
1437         tcg_gen_shri_i32(src2, src2, 8);
1438         tcg_gen_ext8u_i32(src2lo, src2);
1439         tcg_gen_shri_i32(src2, src2, 8);
1440         tcg_gen_ext8u_i32(src2hi, src2);
1441         tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1442         tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1443         tcg_gen_and_i32(src2lo, src2lo, src2hi);
1444         tcg_gen_or_i32(crf, crf, src2lo);
1445     }
1446     tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1447 }
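
/*
 * Behaviour sketch (added comment): cmprb checks whether the low byte of rA
 * lies inside one byte range (bytes 0..1 of rB) or, when the L field of the
 * opcode is set, inside either of two ranges (bytes 0..1 and 2..3 of rB).
 * The result lands in the GT bit of the selected CR field; the final shift
 * leaves every other bit of that field zero.
 */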
1448 
1449 #if defined(TARGET_PPC64)
1450 /* cmpeqb */
1451 static void gen_cmpeqb(DisasContext *ctx)
1452 {
1453     gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1454                       cpu_gpr[rB(ctx->opcode)]);
1455 }
1456 #endif
1457 
1458 /* isel (PowerPC 2.03 specification) */
1459 static void gen_isel(DisasContext *ctx)
1460 {
1461     uint32_t bi = rC(ctx->opcode);
1462     uint32_t mask = 0x08 >> (bi & 0x03);
1463     TCGv t0 = tcg_temp_new();
1464     TCGv zr;
1465 
1466     tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1467     tcg_gen_andi_tl(t0, t0, mask);
1468 
1469     zr = tcg_constant_tl(0);
1470     tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1471                        rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1472                        cpu_gpr[rB(ctx->opcode)]);
1473 }
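
/*
 * Semantics sketch (added comment): isel rD,rA,rB,BC computes
 *
 *     rD = CR[BC] ? (rA ? GPR[rA] : 0) : GPR[rB]
 *
 * i.e. the tested condition bit selects GPR[rA] when set (with rA = 0
 * reading as the constant zero) and GPR[rB] when clear.
 */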
1474 
1475 /* cmpb: PowerPC 2.05 specification */
1476 static void gen_cmpb(DisasContext *ctx)
1477 {
1478     gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1479                     cpu_gpr[rB(ctx->opcode)]);
1480 }
1481 
1482 /***                           Integer arithmetic                          ***/
1483 
1484 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1485                                            TCGv arg1, TCGv arg2, int sub)
1486 {
1487     TCGv t0 = tcg_temp_new();
1488 
1489     tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1490     tcg_gen_xor_tl(t0, arg1, arg2);
1491     if (sub) {
1492         tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1493     } else {
1494         tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1495     }
1496     if (NARROW_MODE(ctx)) {
1497         tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1498         if (is_isa300(ctx)) {
1499             tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1500         }
1501     } else {
1502         if (is_isa300(ctx)) {
1503             tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1504         }
1505         tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1506     }
1507     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1508 }
1509 
1510 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1511                                              TCGv res, TCGv arg0, TCGv arg1,
1512                                              TCGv ca32, int sub)
1513 {
1514     TCGv t0;
1515 
1516     if (!is_isa300(ctx)) {
1517         return;
1518     }
1519 
1520     t0 = tcg_temp_new();
1521     if (sub) {
1522         tcg_gen_eqv_tl(t0, arg0, arg1);
1523     } else {
1524         tcg_gen_xor_tl(t0, arg0, arg1);
1525     }
1526     tcg_gen_xor_tl(t0, t0, res);
1527     tcg_gen_extract_tl(ca32, t0, 32, 1);
1528 }
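
/*
 * Derivation (added comment): for res = a + b (+ carry-in), the carry into
 * bit k equals a[k] ^ b[k] ^ res[k], so the carry out of the low 32 bits is
 * bit 32 of (a ^ b ^ res).  For subtraction the caller passes the operands
 * of res = a + ~b + carry-in, and a ^ ~b is eqv(a, b), which is why the
 * sub path uses tcg_gen_eqv_tl above.
 */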
1529 
1530 /* Common add function */
1531 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1532                                     TCGv arg2, TCGv ca, TCGv ca32,
1533                                     bool add_ca, bool compute_ca,
1534                                     bool compute_ov, bool compute_rc0)
1535 {
1536     TCGv t0 = ret;
1537 
1538     if (compute_ca || compute_ov) {
1539         t0 = tcg_temp_new();
1540     }
1541 
1542     if (compute_ca) {
1543         if (NARROW_MODE(ctx)) {
1544             /*
1545              * Caution: a non-obvious corner case of the spec is that
1546              * we must produce the *entire* 64-bit addition, but report
1547              * the carry into bit 32 (see the example after this function).
1548              */
1549             TCGv t1 = tcg_temp_new();
1550             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1551             tcg_gen_add_tl(t0, arg1, arg2);
1552             if (add_ca) {
1553                 tcg_gen_add_tl(t0, t0, ca);
1554             }
1555             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1556             tcg_gen_extract_tl(ca, ca, 32, 1);
1557             if (is_isa300(ctx)) {
1558                 tcg_gen_mov_tl(ca32, ca);
1559             }
1560         } else {
1561             TCGv zero = tcg_constant_tl(0);
1562             if (add_ca) {
1563                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1564                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1565             } else {
1566                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1567             }
1568             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1569         }
1570     } else {
1571         tcg_gen_add_tl(t0, arg1, arg2);
1572         if (add_ca) {
1573             tcg_gen_add_tl(t0, t0, ca);
1574         }
1575     }
1576 
1577     if (compute_ov) {
1578         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1579     }
1580     if (unlikely(compute_rc0)) {
1581         gen_set_Rc0(ctx, t0);
1582     }
1583 
1584     if (t0 != ret) {
1585         tcg_gen_mov_tl(ret, t0);
1586     }
1587 }
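
/*
 * Worked example for the narrow-mode carry path (added comment): with
 * 32-bit operands 0xFFFFFFFF and 0x1 the full 64-bit sum is 0x1_0000_0000.
 * XOR-ing that sum with arg1 ^ arg2 (the carry-less sum) recovers, at each
 * bit position, the carry that entered it, so extracting bit 32 yields
 * CA = 1 even though the 64-bit addition itself did not wrap.
 */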
1588 /* Add functions with two operands */
1589 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
1590 static void glue(gen_, name)(DisasContext *ctx)                               \
1591 {                                                                             \
1592     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1593                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1594                      ca, glue(ca, 32),                                        \
1595                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1596 }
1597 /* Add functions with one operand and one immediate */
1598 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
1599                                 add_ca, compute_ca, compute_ov)               \
1600 static void glue(gen_, name)(DisasContext *ctx)                               \
1601 {                                                                             \
1602     TCGv t0 = tcg_constant_tl(const_val);                                     \
1603     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1604                      cpu_gpr[rA(ctx->opcode)], t0,                            \
1605                      ca, glue(ca, 32),                                        \
1606                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1607 }
1608 
1609 /* add  add.  addo  addo. */
1610 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1611 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1612 /* addc  addc.  addco  addco. */
1613 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1614 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1615 /* adde  adde.  addeo  addeo. */
1616 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1617 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1618 /* addme  addme.  addmeo  addmeo.  */
1619 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1620 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1621 /* addex */
1622 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1623 /* addze  addze.  addzeo  addzeo. */
1624 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1625 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1626 /* addic  addic. */
1627 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1628 {
1629     TCGv c = tcg_constant_tl(SIMM(ctx->opcode));
1630     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1631                      c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1632 }
1633 
1634 static void gen_addic(DisasContext *ctx)
1635 {
1636     gen_op_addic(ctx, 0);
1637 }
1638 
1639 static void gen_addic_(DisasContext *ctx)
1640 {
1641     gen_op_addic(ctx, 1);
1642 }
1643 
1644 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1645                                      TCGv arg2, int sign, int compute_ov)
1646 {
1647     TCGv_i32 t0 = tcg_temp_new_i32();
1648     TCGv_i32 t1 = tcg_temp_new_i32();
1649     TCGv_i32 t2 = tcg_temp_new_i32();
1650     TCGv_i32 t3 = tcg_temp_new_i32();
1651 
1652     tcg_gen_trunc_tl_i32(t0, arg1);
1653     tcg_gen_trunc_tl_i32(t1, arg2);
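     /*
      * t2 flags the divisions the ISA leaves undefined: divide by zero,
      * plus INT_MIN / -1 for the signed form.  In those cases the divisor
      * is forced to a safe non-zero value so the host division cannot
      * trap; t2 is also used to set OV/OV32 when requested.
      */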
1654     if (sign) {
1655         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1656         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1657         tcg_gen_and_i32(t2, t2, t3);
1658         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1659         tcg_gen_or_i32(t2, t2, t3);
1660         tcg_gen_movi_i32(t3, 0);
1661         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1662         tcg_gen_div_i32(t3, t0, t1);
1663         tcg_gen_extu_i32_tl(ret, t3);
1664     } else {
1665         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1666         tcg_gen_movi_i32(t3, 0);
1667         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1668         tcg_gen_divu_i32(t3, t0, t1);
1669         tcg_gen_extu_i32_tl(ret, t3);
1670     }
1671     if (compute_ov) {
1672         tcg_gen_extu_i32_tl(cpu_ov, t2);
1673         if (is_isa300(ctx)) {
1674             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1675         }
1676         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1677     }
1678 
1679     if (unlikely(Rc(ctx->opcode) != 0)) {
1680         gen_set_Rc0(ctx, ret);
1681     }
1682 }
1683 /* Div functions */
1684 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
1685 static void glue(gen_, name)(DisasContext *ctx)                               \
1686 {                                                                             \
1687     gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1688                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1689                      sign, compute_ov);                                       \
1690 }
1691 /* divwu  divwu.  divwuo  divwuo.   */
1692 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1693 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1694 /* divw  divw.  divwo  divwo.   */
1695 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1696 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1697 
1698 /* div[wd]eu[o][.] */
1699 #define GEN_DIVE(name, hlpr, compute_ov)                                      \
1700 static void gen_##name(DisasContext *ctx)                                     \
1701 {                                                                             \
1702     TCGv_i32 t0 = tcg_constant_i32(compute_ov);                               \
1703     gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
1704                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1705     if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
1706         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
1707     }                                                                         \
1708 }
1709 
1710 GEN_DIVE(divweu, divweu, 0);
1711 GEN_DIVE(divweuo, divweu, 1);
1712 GEN_DIVE(divwe, divwe, 0);
1713 GEN_DIVE(divweo, divwe, 1);
1714 
1715 #if defined(TARGET_PPC64)
1716 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1717                                      TCGv arg2, int sign, int compute_ov)
1718 {
1719     TCGv_i64 t0 = tcg_temp_new_i64();
1720     TCGv_i64 t1 = tcg_temp_new_i64();
1721     TCGv_i64 t2 = tcg_temp_new_i64();
1722     TCGv_i64 t3 = tcg_temp_new_i64();
1723 
1724     tcg_gen_mov_i64(t0, arg1);
1725     tcg_gen_mov_i64(t1, arg2);
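     /*
      * Undefined-division handling mirrors gen_op_arith_divw above,
      * just on 64-bit values.
      */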
1726     if (sign) {
1727         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1728         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1729         tcg_gen_and_i64(t2, t2, t3);
1730         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1731         tcg_gen_or_i64(t2, t2, t3);
1732         tcg_gen_movi_i64(t3, 0);
1733         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1734         tcg_gen_div_i64(ret, t0, t1);
1735     } else {
1736         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1737         tcg_gen_movi_i64(t3, 0);
1738         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1739         tcg_gen_divu_i64(ret, t0, t1);
1740     }
1741     if (compute_ov) {
1742         tcg_gen_mov_tl(cpu_ov, t2);
1743         if (is_isa300(ctx)) {
1744             tcg_gen_mov_tl(cpu_ov32, t2);
1745         }
1746         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1747     }
1748 
1749     if (unlikely(Rc(ctx->opcode) != 0)) {
1750         gen_set_Rc0(ctx, ret);
1751     }
1752 }
1753 
1754 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
1755 static void glue(gen_, name)(DisasContext *ctx)                               \
1756 {                                                                             \
1757     gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1758                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
1759                       sign, compute_ov);                                      \
1760 }
1761 /* divdu  divdu.  divduo  divduo.   */
1762 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1763 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1764 /* divd  divd.  divdo  divdo.   */
1765 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1766 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1767 
1768 GEN_DIVE(divdeu, divdeu, 0);
1769 GEN_DIVE(divdeuo, divdeu, 1);
1770 GEN_DIVE(divde, divde, 0);
1771 GEN_DIVE(divdeo, divde, 1);
1772 #endif
1773 
1774 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1775                                      TCGv arg2, int sign)
1776 {
1777     TCGv_i32 t0 = tcg_temp_new_i32();
1778     TCGv_i32 t1 = tcg_temp_new_i32();
1779 
1780     tcg_gen_trunc_tl_i32(t0, arg1);
1781     tcg_gen_trunc_tl_i32(t1, arg2);
1782     if (sign) {
1783         TCGv_i32 t2 = tcg_temp_new_i32();
1784         TCGv_i32 t3 = tcg_temp_new_i32();
1785         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1786         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1787         tcg_gen_and_i32(t2, t2, t3);
1788         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1789         tcg_gen_or_i32(t2, t2, t3);
1790         tcg_gen_movi_i32(t3, 0);
1791         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1792         tcg_gen_rem_i32(t3, t0, t1);
1793         tcg_gen_ext_i32_tl(ret, t3);
1794     } else {
1795         TCGv_i32 t2 = tcg_constant_i32(1);
1796         TCGv_i32 t3 = tcg_constant_i32(0);
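         /*
          * Modulo by zero is undefined by the ISA; substituting a
          * divisor of 1 keeps the host remainder op from trapping.
          */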
1797         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1798         tcg_gen_remu_i32(t0, t0, t1);
1799         tcg_gen_extu_i32_tl(ret, t0);
1800     }
1801 }
1802 
1803 #define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
1804 static void glue(gen_, name)(DisasContext *ctx)                             \
1805 {                                                                           \
1806     gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1807                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1808                       sign);                                                \
1809 }
1810 
1811 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1812 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1813 
1814 #if defined(TARGET_PPC64)
1815 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1816                                      TCGv arg2, int sign)
1817 {
1818     TCGv_i64 t0 = tcg_temp_new_i64();
1819     TCGv_i64 t1 = tcg_temp_new_i64();
1820 
1821     tcg_gen_mov_i64(t0, arg1);
1822     tcg_gen_mov_i64(t1, arg2);
1823     if (sign) {
1824         TCGv_i64 t2 = tcg_temp_new_i64();
1825         TCGv_i64 t3 = tcg_temp_new_i64();
1826         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1827         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1828         tcg_gen_and_i64(t2, t2, t3);
1829         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1830         tcg_gen_or_i64(t2, t2, t3);
1831         tcg_gen_movi_i64(t3, 0);
1832         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1833         tcg_gen_rem_i64(ret, t0, t1);
1834     } else {
1835         TCGv_i64 t2 = tcg_constant_i64(1);
1836         TCGv_i64 t3 = tcg_constant_i64(0);
1837         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1838         tcg_gen_remu_i64(ret, t0, t1);
1839     }
1840 }
1841 
1842 #define GEN_INT_ARITH_MODD(name, opc3, sign)                                \
1843 static void glue(gen_, name)(DisasContext *ctx)                             \
1844 {                                                                           \
1845     gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1846                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1847                       sign);                                                \
1848 }
1849 
1850 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1851 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1852 #endif
1853 
1854 /* mulhw  mulhw. */
1855 static void gen_mulhw(DisasContext *ctx)
1856 {
1857     TCGv_i32 t0 = tcg_temp_new_i32();
1858     TCGv_i32 t1 = tcg_temp_new_i32();
1859 
1860     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1861     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
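     /*
      * muls2 yields the full 64-bit signed product as (low, high)
      * 32-bit halves; mulhw keeps only the high half.
      */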
1862     tcg_gen_muls2_i32(t0, t1, t0, t1);
1863     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1864     if (unlikely(Rc(ctx->opcode) != 0)) {
1865         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1866     }
1867 }
1868 
1869 /* mulhwu  mulhwu.  */
1870 static void gen_mulhwu(DisasContext *ctx)
1871 {
1872     TCGv_i32 t0 = tcg_temp_new_i32();
1873     TCGv_i32 t1 = tcg_temp_new_i32();
1874 
1875     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1876     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1877     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1878     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1879     if (unlikely(Rc(ctx->opcode) != 0)) {
1880         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1881     }
1882 }
1883 
1884 /* mullw  mullw. */
1885 static void gen_mullw(DisasContext *ctx)
1886 {
1887 #if defined(TARGET_PPC64)
1888     TCGv_i64 t0, t1;
1889     t0 = tcg_temp_new_i64();
1890     t1 = tcg_temp_new_i64();
1891     tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
1892     tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
1893     tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1894 #else
1895     tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1896                     cpu_gpr[rB(ctx->opcode)]);
1897 #endif
1898     if (unlikely(Rc(ctx->opcode) != 0)) {
1899         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1900     }
1901 }
1902 
1903 /* mullwo  mullwo. */
1904 static void gen_mullwo(DisasContext *ctx)
1905 {
1906     TCGv_i32 t0 = tcg_temp_new_i32();
1907     TCGv_i32 t1 = tcg_temp_new_i32();
1908 
1909     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1910     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1911     tcg_gen_muls2_i32(t0, t1, t0, t1);
1912 #if defined(TARGET_PPC64)
1913     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
1914 #else
1915     tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
1916 #endif
1917 
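     /*
      * Overflow if the high half is not the sign extension of the low
      * half, i.e. the signed 32x32 product does not fit in 32 bits.
      */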
1918     tcg_gen_sari_i32(t0, t0, 31);
1919     tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
1920     tcg_gen_extu_i32_tl(cpu_ov, t0);
1921     if (is_isa300(ctx)) {
1922         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1923     }
1924     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1925 
1926     if (unlikely(Rc(ctx->opcode) != 0)) {
1927         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1928     }
1929 }
1930 
1931 /* mulli */
1932 static void gen_mulli(DisasContext *ctx)
1933 {
1934     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1935                     SIMM(ctx->opcode));
1936 }
1937 
1938 #if defined(TARGET_PPC64)
1939 /* mulhd  mulhd. */
1940 static void gen_mulhd(DisasContext *ctx)
1941 {
1942     TCGv lo = tcg_temp_new();
1943     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1944                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1945     if (unlikely(Rc(ctx->opcode) != 0)) {
1946         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1947     }
1948 }
1949 
1950 /* mulhdu  mulhdu. */
1951 static void gen_mulhdu(DisasContext *ctx)
1952 {
1953     TCGv lo = tcg_temp_new();
1954     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
1955                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
1956     if (unlikely(Rc(ctx->opcode) != 0)) {
1957         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1958     }
1959 }
1960 
1961 /* mulld  mulld. */
1962 static void gen_mulld(DisasContext *ctx)
1963 {
1964     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1965                    cpu_gpr[rB(ctx->opcode)]);
1966     if (unlikely(Rc(ctx->opcode) != 0)) {
1967         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1968     }
1969 }
1970 
1971 /* mulldo  mulldo. */
1972 static void gen_mulldo(DisasContext *ctx)
1973 {
1974     TCGv_i64 t0 = tcg_temp_new_i64();
1975     TCGv_i64 t1 = tcg_temp_new_i64();
1976 
1977     tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
1978                       cpu_gpr[rB(ctx->opcode)]);
1979     tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
1980 
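     /*
      * Overflow if the high 64 bits are not the sign extension of the
      * low 64 bits of the 128-bit product.
      */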
1981     tcg_gen_sari_i64(t0, t0, 63);
1982     tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
1983     if (is_isa300(ctx)) {
1984         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1985     }
1986     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1987 
1988     if (unlikely(Rc(ctx->opcode) != 0)) {
1989         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1990     }
1991 }
1992 #endif
1993 
1994 /* Common subf function */
1995 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1996                                      TCGv arg2, bool add_ca, bool compute_ca,
1997                                      bool compute_ov, bool compute_rc0)
1998 {
1999     TCGv t0 = ret;
2000 
2001     if (compute_ca || compute_ov) {
2002         t0 = tcg_temp_new();
2003     }
2004 
2005     if (compute_ca) {
2006         /* dest = ~arg1 + arg2 [+ ca].  */
2007         if (NARROW_MODE(ctx)) {
2008             /*
2009              * Caution: a non-obvious corner case of the spec is that
2010              * we must produce the *entire* 64-bit addition, yet
2011              * report only the carry into bit 32 (out of the low word).
2012              */
2013             TCGv inv1 = tcg_temp_new();
2014             TCGv t1 = tcg_temp_new();
2015             tcg_gen_not_tl(inv1, arg1);
2016             if (add_ca) {
2017                 tcg_gen_add_tl(t0, arg2, cpu_ca);
2018             } else {
2019                 tcg_gen_addi_tl(t0, arg2, 1);
2020             }
2021             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
2022             tcg_gen_add_tl(t0, t0, inv1);
2023             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
2024             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2025             if (is_isa300(ctx)) {
2026                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2027             }
2028         } else if (add_ca) {
2029             TCGv zero, inv1 = tcg_temp_new();
2030             tcg_gen_not_tl(inv1, arg1);
2031             zero = tcg_constant_tl(0);
2032             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2033             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2034             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2035         } else {
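             /*
              * No carry-in to add: CA means "no borrow", i.e. it is set
              * when arg2 >= arg1 as unsigned values.
              */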
2036             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2037             tcg_gen_sub_tl(t0, arg2, arg1);
2038             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2039         }
2040     } else if (add_ca) {
2041         /*
2042          * Since we're ignoring carry-out, we can simplify the
2043          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2044          */
2045         tcg_gen_sub_tl(t0, arg2, arg1);
2046         tcg_gen_add_tl(t0, t0, cpu_ca);
2047         tcg_gen_subi_tl(t0, t0, 1);
2048     } else {
2049         tcg_gen_sub_tl(t0, arg2, arg1);
2050     }
2051 
2052     if (compute_ov) {
2053         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2054     }
2055     if (unlikely(compute_rc0)) {
2056         gen_set_Rc0(ctx, t0);
2057     }
2058 
2059     if (t0 != ret) {
2060         tcg_gen_mov_tl(ret, t0);
2061     }
2062 }
2063 /* Sub functions with two operands */
2064 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
2065 static void glue(gen_, name)(DisasContext *ctx)                               \
2066 {                                                                             \
2067     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2068                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
2069                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2070 }
2071 /* Sub functions with one operand and one immediate */
2072 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
2073                                 add_ca, compute_ca, compute_ov)               \
2074 static void glue(gen_, name)(DisasContext *ctx)                               \
2075 {                                                                             \
2076     TCGv t0 = tcg_constant_tl(const_val);                                     \
2077     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2078                       cpu_gpr[rA(ctx->opcode)], t0,                           \
2079                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2080 }
2081 /* subf  subf.  subfo  subfo. */
2082 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2083 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2084 /* subfc  subfc.  subfco  subfco. */
2085 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2086 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2087 /* subfe  subfe.  subfeo  subfeo. */
2088 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2089 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2090 /* subfme  subfme.  subfmeo  subfmeo.  */
2091 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2092 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2093 /* subfze  subfze.  subfzeo  subfzeo. */
2094 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2095 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2096 
2097 /* subfic */
2098 static void gen_subfic(DisasContext *ctx)
2099 {
2100     TCGv c = tcg_constant_tl(SIMM(ctx->opcode));
2101     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2102                       c, 0, 1, 0, 0);
2103 }
2104 
2105 /* neg neg. nego nego. */
2106 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2107 {
2108     TCGv zero = tcg_constant_tl(0);
2109     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2110                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2111 }
2112 
2113 static void gen_neg(DisasContext *ctx)
2114 {
2115     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2116     if (unlikely(Rc(ctx->opcode))) {
2117         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2118     }
2119 }
2120 
2121 static void gen_nego(DisasContext *ctx)
2122 {
2123     gen_op_arith_neg(ctx, 1);
2124 }
2125 
2126 /***                            Integer logical                            ***/
2127 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2128 static void glue(gen_, name)(DisasContext *ctx)                               \
2129 {                                                                             \
2130     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2131        cpu_gpr[rB(ctx->opcode)]);                                             \
2132     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2133         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2134 }
2135 
2136 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2137 static void glue(gen_, name)(DisasContext *ctx)                               \
2138 {                                                                             \
2139     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2140     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2141         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2142 }
2143 
2144 /* and & and. */
2145 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2146 /* andc & andc. */
2147 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2148 
2149 /* andi. */
2150 static void gen_andi_(DisasContext *ctx)
2151 {
2152     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2153                     UIMM(ctx->opcode));
2154     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2155 }
2156 
2157 /* andis. */
2158 static void gen_andis_(DisasContext *ctx)
2159 {
2160     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2161                     UIMM(ctx->opcode) << 16);
2162     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2163 }
2164 
2165 /* cntlzw */
2166 static void gen_cntlzw(DisasContext *ctx)
2167 {
2168     TCGv_i32 t = tcg_temp_new_i32();
2169 
2170     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2171     tcg_gen_clzi_i32(t, t, 32);
2172     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2173 
2174     if (unlikely(Rc(ctx->opcode) != 0)) {
2175         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2176     }
2177 }
2178 
2179 /* cnttzw */
2180 static void gen_cnttzw(DisasContext *ctx)
2181 {
2182     TCGv_i32 t = tcg_temp_new_i32();
2183 
2184     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2185     tcg_gen_ctzi_i32(t, t, 32);
2186     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2187 
2188     if (unlikely(Rc(ctx->opcode) != 0)) {
2189         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2190     }
2191 }
2192 
2193 /* eqv & eqv. */
2194 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2195 /* extsb & extsb. */
2196 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2197 /* extsh & extsh. */
2198 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2199 /* nand & nand. */
2200 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2201 /* nor & nor. */
2202 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2203 
2204 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2205 static void gen_pause(DisasContext *ctx)
2206 {
2207     TCGv_i32 t0 = tcg_constant_i32(0);
2208     tcg_gen_st_i32(t0, cpu_env,
2209                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2210 
2211     /* Stop translation; this gives other CPUs a chance to run */
2212     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2213 }
2214 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2215 
2216 /* or & or. */
2217 static void gen_or(DisasContext *ctx)
2218 {
2219     int rs, ra, rb;
2220 
2221     rs = rS(ctx->opcode);
2222     ra = rA(ctx->opcode);
2223     rb = rB(ctx->opcode);
2224     /* Optimisation for the mr / mr. (rs == rb) case */
2225     if (rs != ra || rs != rb) {
2226         if (rs != rb) {
2227             tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2228         } else {
2229             tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2230         }
2231         if (unlikely(Rc(ctx->opcode) != 0)) {
2232             gen_set_Rc0(ctx, cpu_gpr[ra]);
2233         }
2234     } else if (unlikely(Rc(ctx->opcode) != 0)) {
2235         gen_set_Rc0(ctx, cpu_gpr[rs]);
2236 #if defined(TARGET_PPC64)
2237     } else if (rs != 0) { /* 0 is nop */
2238         int prio = 0;
2239 
2240         switch (rs) {
2241         case 1:
2242             /* Set process priority to low */
2243             prio = 2;
2244             break;
2245         case 6:
2246             /* Set process priority to medium-low */
2247             prio = 3;
2248             break;
2249         case 2:
2250             /* Set process priority to normal */
2251             prio = 4;
2252             break;
2253 #if !defined(CONFIG_USER_ONLY)
2254         case 31:
2255             if (!ctx->pr) {
2256                 /* Set process priority to very low */
2257                 prio = 1;
2258             }
2259             break;
2260         case 5:
2261             if (!ctx->pr) {
2262                 /* Set process priority to medium-hight */
2263                 /* Set process priority to medium-high */
2264             }
2265             break;
2266         case 3:
2267             if (!ctx->pr) {
2268                 /* Set process priority to high */
2269                 prio = 6;
2270             }
2271             break;
2272         case 7:
2273             if (ctx->hv && !ctx->pr) {
2274                 /* Set process priority to very high */
2275                 prio = 7;
2276             }
2277             break;
2278 #endif
2279         default:
2280             break;
2281         }
2282         if (prio) {
2283             TCGv t0 = tcg_temp_new();
2284             gen_load_spr(t0, SPR_PPR);
2285             tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2286             tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2287             gen_store_spr(SPR_PPR, t0);
2288         }
2289 #if !defined(CONFIG_USER_ONLY)
2290         /*
2291          * Pause out of TCG; otherwise spin loops with smt_low eat too
2292          * much CPU and the kernel hangs.  This applies to all
2293          * encodings other than no-op, e.g., miso(rs=26), yield(27),
2294          * mdoio(29), mdoom(30), and all currently undefined.
2295          */
2296         gen_pause(ctx);
2297 #endif
2298 #endif
2299     }
2300 }
2301 /* orc & orc. */
2302 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2303 
2304 /* xor & xor. */
2305 static void gen_xor(DisasContext *ctx)
2306 {
2307     /* Optimisation for "set to zero" case */
2308     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2309         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2310                        cpu_gpr[rB(ctx->opcode)]);
2311     } else {
2312         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2313     }
2314     if (unlikely(Rc(ctx->opcode) != 0)) {
2315         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2316     }
2317 }
2318 
2319 /* ori */
2320 static void gen_ori(DisasContext *ctx)
2321 {
2322     target_ulong uimm = UIMM(ctx->opcode);
2323 
2324     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2325         return;
2326     }
2327     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2328 }
2329 
2330 /* oris */
2331 static void gen_oris(DisasContext *ctx)
2332 {
2333     target_ulong uimm = UIMM(ctx->opcode);
2334 
2335     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2336         /* NOP */
2337         return;
2338     }
2339     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2340                    uimm << 16);
2341 }
2342 
2343 /* xori */
2344 static void gen_xori(DisasContext *ctx)
2345 {
2346     target_ulong uimm = UIMM(ctx->opcode);
2347 
2348     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2349         /* NOP */
2350         return;
2351     }
2352     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2353 }
2354 
2355 /* xoris */
2356 static void gen_xoris(DisasContext *ctx)
2357 {
2358     target_ulong uimm = UIMM(ctx->opcode);
2359 
2360     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2361         /* NOP */
2362         return;
2363     }
2364     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2365                     uimm << 16);
2366 }
2367 
2368 /* popcntb: PowerPC 2.03 specification */
2369 static void gen_popcntb(DisasContext *ctx)
2370 {
2371     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2372 }
2373 
2374 static void gen_popcntw(DisasContext *ctx)
2375 {
2376 #if defined(TARGET_PPC64)
2377     gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2378 #else
2379     tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2380 #endif
2381 }
2382 
2383 #if defined(TARGET_PPC64)
2384 /* popcntd: PowerPC 2.06 specification */
2385 static void gen_popcntd(DisasContext *ctx)
2386 {
2387     tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2388 }
2389 #endif
2390 
2391 /* prtyw: PowerPC 2.05 specification */
2392 static void gen_prtyw(DisasContext *ctx)
2393 {
2394     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2395     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2396     TCGv t0 = tcg_temp_new();
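     /*
      * Fold the least significant bit of each byte together with XOR:
      * after the two shift/XOR steps, bit 0 of each word holds the
      * parity of that word's four byte-lsbs, and the final mask keeps
      * just those bits.
      */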
2397     tcg_gen_shri_tl(t0, rs, 16);
2398     tcg_gen_xor_tl(ra, rs, t0);
2399     tcg_gen_shri_tl(t0, ra, 8);
2400     tcg_gen_xor_tl(ra, ra, t0);
2401     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2402 }
2403 
2404 #if defined(TARGET_PPC64)
2405 /* prtyd: PowerPC 2.05 specification */
2406 static void gen_prtyd(DisasContext *ctx)
2407 {
2408     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2409     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2410     TCGv t0 = tcg_temp_new();
2411     tcg_gen_shri_tl(t0, rs, 32);
2412     tcg_gen_xor_tl(ra, rs, t0);
2413     tcg_gen_shri_tl(t0, ra, 16);
2414     tcg_gen_xor_tl(ra, ra, t0);
2415     tcg_gen_shri_tl(t0, ra, 8);
2416     tcg_gen_xor_tl(ra, ra, t0);
2417     tcg_gen_andi_tl(ra, ra, 1);
2418 }
2419 #endif
2420 
2421 #if defined(TARGET_PPC64)
2422 /* bpermd */
2423 static void gen_bpermd(DisasContext *ctx)
2424 {
2425     gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2426                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2427 }
2428 #endif
2429 
2430 #if defined(TARGET_PPC64)
2431 /* extsw & extsw. */
2432 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2433 
2434 /* cntlzd */
2435 static void gen_cntlzd(DisasContext *ctx)
2436 {
2437     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2438     if (unlikely(Rc(ctx->opcode) != 0)) {
2439         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2440     }
2441 }
2442 
2443 /* cnttzd */
2444 static void gen_cnttzd(DisasContext *ctx)
2445 {
2446     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2447     if (unlikely(Rc(ctx->opcode) != 0)) {
2448         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2449     }
2450 }
2451 
2452 /* darn */
2453 static void gen_darn(DisasContext *ctx)
2454 {
2455     int l = L(ctx->opcode);
2456 
2457     if (l > 2) {
2458         tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2459     } else {
2460         translator_io_start(&ctx->base);
2461         if (l == 0) {
2462             gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2463         } else {
2464             /* Return 64-bit random for both CRN and RRN */
2465             gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2466         }
2467     }
2468 }
2469 #endif
2470 
2471 /***                             Integer rotate                            ***/
2472 
2473 /* rlwimi & rlwimi. */
2474 static void gen_rlwimi(DisasContext *ctx)
2475 {
2476     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2477     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2478     uint32_t sh = SH(ctx->opcode);
2479     uint32_t mb = MB(ctx->opcode);
2480     uint32_t me = ME(ctx->opcode);
2481 
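     /*
      * If the rotation lines the source field up with the mask
      * (sh == 31 - me) and the mask is contiguous (mb <= me), the
      * insert collapses into a single deposit.
      */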
2482     if (sh == (31 - me) && mb <= me) {
2483         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2484     } else {
2485         target_ulong mask;
2486         bool mask_in_32b = true;
2487         TCGv t1;
2488 
2489 #if defined(TARGET_PPC64)
2490         mb += 32;
2491         me += 32;
2492 #endif
2493         mask = MASK(mb, me);
2494 
2495 #if defined(TARGET_PPC64)
2496         if (mask > 0xffffffffu) {
2497             mask_in_32b = false;
2498         }
2499 #endif
2500         t1 = tcg_temp_new();
2501         if (mask_in_32b) {
2502             TCGv_i32 t0 = tcg_temp_new_i32();
2503             tcg_gen_trunc_tl_i32(t0, t_rs);
2504             tcg_gen_rotli_i32(t0, t0, sh);
2505             tcg_gen_extu_i32_tl(t1, t0);
2506         } else {
2507 #if defined(TARGET_PPC64)
2508             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2509             tcg_gen_rotli_i64(t1, t1, sh);
2510 #else
2511             g_assert_not_reached();
2512 #endif
2513         }
2514 
2515         tcg_gen_andi_tl(t1, t1, mask);
2516         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2517         tcg_gen_or_tl(t_ra, t_ra, t1);
2518     }
2519     if (unlikely(Rc(ctx->opcode) != 0)) {
2520         gen_set_Rc0(ctx, t_ra);
2521     }
2522 }
2523 
2524 /* rlwinm & rlwinm. */
2525 static void gen_rlwinm(DisasContext *ctx)
2526 {
2527     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2528     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2529     int sh = SH(ctx->opcode);
2530     int mb = MB(ctx->opcode);
2531     int me = ME(ctx->opcode);
2532     int len = me - mb + 1;
2533     int rsh = (32 - sh) & 31;
2534 
2535     if (sh != 0 && len > 0 && me == (31 - sh)) {
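     /*
      * Fast paths: a left shift into zeroes becomes a deposit, and a
      * mask reaching bit 31 combined with a right rotation becomes a
      * plain bit-field extract.
      */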
2536         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2537     } else if (me == 31 && rsh + len <= 32) {
2538         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2539     } else {
2540         target_ulong mask;
2541         bool mask_in_32b = true;
2542 #if defined(TARGET_PPC64)
2543         mb += 32;
2544         me += 32;
2545 #endif
2546         mask = MASK(mb, me);
2547 #if defined(TARGET_PPC64)
2548         if (mask > 0xffffffffu) {
2549             mask_in_32b = false;
2550         }
2551 #endif
2552         if (mask_in_32b) {
2553             if (sh == 0) {
2554                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2555             } else {
2556                 TCGv_i32 t0 = tcg_temp_new_i32();
2557                 tcg_gen_trunc_tl_i32(t0, t_rs);
2558                 tcg_gen_rotli_i32(t0, t0, sh);
2559                 tcg_gen_andi_i32(t0, t0, mask);
2560                 tcg_gen_extu_i32_tl(t_ra, t0);
2561             }
2562         } else {
2563 #if defined(TARGET_PPC64)
2564             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2565             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2566             tcg_gen_andi_i64(t_ra, t_ra, mask);
2567 #else
2568             g_assert_not_reached();
2569 #endif
2570         }
2571     }
2572     if (unlikely(Rc(ctx->opcode) != 0)) {
2573         gen_set_Rc0(ctx, t_ra);
2574     }
2575 }
2576 
2577 /* rlwnm & rlwnm. */
2578 static void gen_rlwnm(DisasContext *ctx)
2579 {
2580     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2581     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2582     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2583     uint32_t mb = MB(ctx->opcode);
2584     uint32_t me = ME(ctx->opcode);
2585     target_ulong mask;
2586     bool mask_in_32b = true;
2587 
2588 #if defined(TARGET_PPC64)
2589     mb += 32;
2590     me += 32;
2591 #endif
2592     mask = MASK(mb, me);
2593 
2594 #if defined(TARGET_PPC64)
2595     if (mask > 0xffffffffu) {
2596         mask_in_32b = false;
2597     }
2598 #endif
2599     if (mask_in_32b) {
2600         TCGv_i32 t0 = tcg_temp_new_i32();
2601         TCGv_i32 t1 = tcg_temp_new_i32();
2602         tcg_gen_trunc_tl_i32(t0, t_rb);
2603         tcg_gen_trunc_tl_i32(t1, t_rs);
2604         tcg_gen_andi_i32(t0, t0, 0x1f);
2605         tcg_gen_rotl_i32(t1, t1, t0);
2606         tcg_gen_extu_i32_tl(t_ra, t1);
2607     } else {
2608 #if defined(TARGET_PPC64)
2609         TCGv_i64 t0 = tcg_temp_new_i64();
2610         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2611         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2612         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2613 #else
2614         g_assert_not_reached();
2615 #endif
2616     }
2617 
2618     tcg_gen_andi_tl(t_ra, t_ra, mask);
2619 
2620     if (unlikely(Rc(ctx->opcode) != 0)) {
2621         gen_set_Rc0(ctx, t_ra);
2622     }
2623 }
2624 
2625 #if defined(TARGET_PPC64)
2626 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2627 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2628 {                                                                             \
2629     gen_##name(ctx, 0);                                                       \
2630 }                                                                             \
2631                                                                               \
2632 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2633 {                                                                             \
2634     gen_##name(ctx, 1);                                                       \
2635 }
2636 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2637 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2638 {                                                                             \
2639     gen_##name(ctx, 0, 0);                                                    \
2640 }                                                                             \
2641                                                                               \
2642 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2643 {                                                                             \
2644     gen_##name(ctx, 0, 1);                                                    \
2645 }                                                                             \
2646                                                                               \
2647 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2648 {                                                                             \
2649     gen_##name(ctx, 1, 0);                                                    \
2650 }                                                                             \
2651                                                                               \
2652 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2653 {                                                                             \
2654     gen_##name(ctx, 1, 1);                                                    \
2655 }
2656 
2657 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2658 {
2659     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2660     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2661     int len = me - mb + 1;
2662     int rsh = (64 - sh) & 63;
2663 
2664     if (sh != 0 && len > 0 && me == (63 - sh)) {
2665         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2666     } else if (me == 63 && rsh + len <= 64) {
2667         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2668     } else {
2669         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2670         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2671     }
2672     if (unlikely(Rc(ctx->opcode) != 0)) {
2673         gen_set_Rc0(ctx, t_ra);
2674     }
2675 }
2676 
2677 /* rldicl - rldicl. */
2678 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2679 {
2680     uint32_t sh, mb;
2681 
2682     sh = SH(ctx->opcode) | (shn << 5);
2683     mb = MB(ctx->opcode) | (mbn << 5);
2684     gen_rldinm(ctx, mb, 63, sh);
2685 }
2686 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2687 
2688 /* rldicr - rldicr. */
2689 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2690 {
2691     uint32_t sh, me;
2692 
2693     sh = SH(ctx->opcode) | (shn << 5);
2694     me = MB(ctx->opcode) | (men << 5);
2695     gen_rldinm(ctx, 0, me, sh);
2696 }
2697 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2698 
2699 /* rldic - rldic. */
2700 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2701 {
2702     uint32_t sh, mb;
2703 
2704     sh = SH(ctx->opcode) | (shn << 5);
2705     mb = MB(ctx->opcode) | (mbn << 5);
2706     gen_rldinm(ctx, mb, 63 - sh, sh);
2707 }
2708 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2709 
2710 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2711 {
2712     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2713     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2714     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2715     TCGv t0;
2716 
2717     t0 = tcg_temp_new();
2718     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2719     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2720 
2721     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2722     if (unlikely(Rc(ctx->opcode) != 0)) {
2723         gen_set_Rc0(ctx, t_ra);
2724     }
2725 }
2726 
2727 /* rldcl - rldcl. */
2728 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2729 {
2730     uint32_t mb;
2731 
2732     mb = MB(ctx->opcode) | (mbn << 5);
2733     gen_rldnm(ctx, mb, 63);
2734 }
2735 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2736 
2737 /* rldcr - rldcr. */
2738 static inline void gen_rldcr(DisasContext *ctx, int men)
2739 {
2740     uint32_t me;
2741 
2742     me = MB(ctx->opcode) | (men << 5);
2743     gen_rldnm(ctx, 0, me);
2744 }
2745 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2746 
2747 /* rldimi - rldimi. */
2748 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2749 {
2750     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2751     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2752     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2753     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2754     uint32_t me = 63 - sh;
2755 
2756     if (mb <= me) {
2757         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2758     } else {
2759         target_ulong mask = MASK(mb, me);
2760         TCGv t1 = tcg_temp_new();
2761 
2762         tcg_gen_rotli_tl(t1, t_rs, sh);
2763         tcg_gen_andi_tl(t1, t1, mask);
2764         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2765         tcg_gen_or_tl(t_ra, t_ra, t1);
2766     }
2767     if (unlikely(Rc(ctx->opcode) != 0)) {
2768         gen_set_Rc0(ctx, t_ra);
2769     }
2770 }
2771 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2772 #endif
2773 
2774 /***                             Integer shift                             ***/
2775 
2776 /* slw & slw. */
2777 static void gen_slw(DisasContext *ctx)
2778 {
2779     TCGv t0, t1;
2780 
2781     t0 = tcg_temp_new();
2782     /* AND rS with a mask that is 0 when rB >= 0x20 */
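     /*
      * The mask is built by shifting rB's "shift >= 32" bit (bit 5)
      * into the sign position and arithmetically shifting it back,
      * which broadcasts that bit to every position.
      */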
2783 #if defined(TARGET_PPC64)
2784     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2785     tcg_gen_sari_tl(t0, t0, 0x3f);
2786 #else
2787     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2788     tcg_gen_sari_tl(t0, t0, 0x1f);
2789 #endif
2790     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2791     t1 = tcg_temp_new();
2792     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2793     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2794     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2795     if (unlikely(Rc(ctx->opcode) != 0)) {
2796         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2797     }
2798 }
2799 
2800 /* sraw & sraw. */
2801 static void gen_sraw(DisasContext *ctx)
2802 {
2803     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2804                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2805     if (unlikely(Rc(ctx->opcode) != 0)) {
2806         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2807     }
2808 }
2809 
2810 /* srawi & srawi. */
2811 static void gen_srawi(DisasContext *ctx)
2812 {
2813     int sh = SH(ctx->opcode);
2814     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2815     TCGv src = cpu_gpr[rS(ctx->opcode)];
2816     if (sh == 0) {
2817         tcg_gen_ext32s_tl(dst, src);
2818         tcg_gen_movi_tl(cpu_ca, 0);
2819         if (is_isa300(ctx)) {
2820             tcg_gen_movi_tl(cpu_ca32, 0);
2821         }
2822     } else {
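         /*
          * CA is set only when the source is negative and 1 bits are
          * shifted out, i.e. when the arithmetic shift is not an exact
          * division by 2^sh.
          */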
2823         TCGv t0;
2824         tcg_gen_ext32s_tl(dst, src);
2825         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2826         t0 = tcg_temp_new();
2827         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2828         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2829         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2830         if (is_isa300(ctx)) {
2831             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2832         }
2833         tcg_gen_sari_tl(dst, dst, sh);
2834     }
2835     if (unlikely(Rc(ctx->opcode) != 0)) {
2836         gen_set_Rc0(ctx, dst);
2837     }
2838 }
2839 
2840 /* srw & srw. */
2841 static void gen_srw(DisasContext *ctx)
2842 {
2843     TCGv t0, t1;
2844 
2845     t0 = tcg_temp_new();
2846     /* AND rS with a mask that is 0 when rB >= 0x20 */
2847 #if defined(TARGET_PPC64)
2848     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2849     tcg_gen_sari_tl(t0, t0, 0x3f);
2850 #else
2851     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2852     tcg_gen_sari_tl(t0, t0, 0x1f);
2853 #endif
2854     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2855     tcg_gen_ext32u_tl(t0, t0);
2856     t1 = tcg_temp_new();
2857     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2858     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2859     if (unlikely(Rc(ctx->opcode) != 0)) {
2860         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2861     }
2862 }
2863 
2864 #if defined(TARGET_PPC64)
2865 /* sld & sld. */
2866 static void gen_sld(DisasContext *ctx)
2867 {
2868     TCGv t0, t1;
2869 
2870     t0 = tcg_temp_new();
2871     /* AND rS with a mask that is 0 when rB >= 0x40 */
2872     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2873     tcg_gen_sari_tl(t0, t0, 0x3f);
2874     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2875     t1 = tcg_temp_new();
2876     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2877     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2878     if (unlikely(Rc(ctx->opcode) != 0)) {
2879         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2880     }
2881 }
2882 
2883 /* srad & srad. */
2884 static void gen_srad(DisasContext *ctx)
2885 {
2886     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
2887                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2888     if (unlikely(Rc(ctx->opcode) != 0)) {
2889         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2890     }
2891 }
2892 /* sradi & sradi. */
2893 static inline void gen_sradi(DisasContext *ctx, int n)
2894 {
2895     int sh = SH(ctx->opcode) + (n << 5);
2896     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2897     TCGv src = cpu_gpr[rS(ctx->opcode)];
2898     if (sh == 0) {
2899         tcg_gen_mov_tl(dst, src);
2900         tcg_gen_movi_tl(cpu_ca, 0);
2901         if (is_isa300(ctx)) {
2902             tcg_gen_movi_tl(cpu_ca32, 0);
2903         }
2904     } else {
2905         TCGv t0;
2906         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2907         t0 = tcg_temp_new();
2908         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2909         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2910         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2911         if (is_isa300(ctx)) {
2912             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2913         }
2914         tcg_gen_sari_tl(dst, src, sh);
2915     }
2916     if (unlikely(Rc(ctx->opcode) != 0)) {
2917         gen_set_Rc0(ctx, dst);
2918     }
2919 }
2920 
2921 static void gen_sradi0(DisasContext *ctx)
2922 {
2923     gen_sradi(ctx, 0);
2924 }
2925 
2926 static void gen_sradi1(DisasContext *ctx)
2927 {
2928     gen_sradi(ctx, 1);
2929 }
2930 
2931 /* extswsli & extswsli. */
2932 static inline void gen_extswsli(DisasContext *ctx, int n)
2933 {
2934     int sh = SH(ctx->opcode) + (n << 5);
2935     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2936     TCGv src = cpu_gpr[rS(ctx->opcode)];
2937 
2938     tcg_gen_ext32s_tl(dst, src);
2939     tcg_gen_shli_tl(dst, dst, sh);
2940     if (unlikely(Rc(ctx->opcode) != 0)) {
2941         gen_set_Rc0(ctx, dst);
2942     }
2943 }
2944 
2945 static void gen_extswsli0(DisasContext *ctx)
2946 {
2947     gen_extswsli(ctx, 0);
2948 }
2949 
2950 static void gen_extswsli1(DisasContext *ctx)
2951 {
2952     gen_extswsli(ctx, 1);
2953 }
2954 
2955 /* srd & srd. */
2956 static void gen_srd(DisasContext *ctx)
2957 {
2958     TCGv t0, t1;
2959 
2960     t0 = tcg_temp_new();
2961     /* AND rS with a mask that is 0 when rB >= 0x40 */
2962     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2963     tcg_gen_sari_tl(t0, t0, 0x3f);
2964     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2965     t1 = tcg_temp_new();
2966     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2967     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2968     if (unlikely(Rc(ctx->opcode) != 0)) {
2969         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2970     }
2971 }
2972 #endif
2973 
2974 /***                           Addressing modes                            ***/
2975 /* Register indirect with immediate index: EA = (rA|0) + SIMM */
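     /*
      * A non-zero maskl clears low displacement bits that the encoding
      * reuses as opcode bits (e.g. DS-form callers pass 0x03).
      */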
2976 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2977                                       target_long maskl)
2978 {
2979     target_long simm = SIMM(ctx->opcode);
2980 
2981     simm &= ~maskl;
2982     if (rA(ctx->opcode) == 0) {
2983         if (NARROW_MODE(ctx)) {
2984             simm = (uint32_t)simm;
2985         }
2986         tcg_gen_movi_tl(EA, simm);
2987     } else if (likely(simm != 0)) {
2988         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2989         if (NARROW_MODE(ctx)) {
2990             tcg_gen_ext32u_tl(EA, EA);
2991         }
2992     } else {
2993         if (NARROW_MODE(ctx)) {
2994             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2995         } else {
2996             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2997         }
2998     }
2999 }
3000 
3001 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3002 {
3003     if (rA(ctx->opcode) == 0) {
3004         if (NARROW_MODE(ctx)) {
3005             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3006         } else {
3007             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3008         }
3009     } else {
3010         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3011         if (NARROW_MODE(ctx)) {
3012             tcg_gen_ext32u_tl(EA, EA);
3013         }
3014     }
3015 }
3016 
3017 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3018 {
3019     if (rA(ctx->opcode) == 0) {
3020         tcg_gen_movi_tl(EA, 0);
3021     } else if (NARROW_MODE(ctx)) {
3022         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3023     } else {
3024         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3025     }
3026 }
3027 
3028 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3029                                 target_long val)
3030 {
3031     tcg_gen_addi_tl(ret, arg1, val);
3032     if (NARROW_MODE(ctx)) {
3033         tcg_gen_ext32u_tl(ret, ret);
3034     }
3035 }
3036 
3037 static inline void gen_align_no_le(DisasContext *ctx)
3038 {
3039     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3040                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3041 }
3042 
3043 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3044 {
3045     TCGv ea = tcg_temp_new();
3046     if (ra) {
3047         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3048     } else {
3049         tcg_gen_mov_tl(ea, displ);
3050     }
3051     if (NARROW_MODE(ctx)) {
3052         tcg_gen_ext32u_tl(ea, ea);
3053     }
3054     return ea;
3055 }
3056 
3057 /***                             Integer load                              ***/
3058 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3059 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
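     /*
      * DEF_MEMOP applies the byte order currently selected for the guest;
      * BSWAP_MEMOP flips it and is used by the byte-reversed load/store
      * helpers (the *16ur/*32ur/*64r variants below).
      */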
3060 
3061 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
3062 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
3063                                   TCGv val,                             \
3064                                   TCGv addr)                            \
3065 {                                                                       \
3066     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
3067 }
3068 
3069 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
3070 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3071 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3072 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3073 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3074 
3075 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3076 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3077 
3078 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
3079 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
3080                                              TCGv_i64 val,          \
3081                                              TCGv addr)             \
3082 {                                                                   \
3083     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
3084 }
3085 
3086 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
3087 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3088 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3089 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3090 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
3091 
3092 #if defined(TARGET_PPC64)
3093 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
3094 #endif
3095 
3096 #define GEN_QEMU_STORE_TL(stop, op)                                     \
3097 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
3098                                   TCGv val,                             \
3099                                   TCGv addr)                            \
3100 {                                                                       \
3101     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
3102 }
3103 
3104 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
3105 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
3106 #endif
3107 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3108 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3109 
3110 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3111 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3112 
3113 #define GEN_QEMU_STORE_64(stop, op)                               \
3114 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
3115                                               TCGv_i64 val,       \
3116                                               TCGv addr)          \
3117 {                                                                 \
3118     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
3119 }
3120 
3121 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
3122 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3123 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3124 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
3125 
3126 #if defined(TARGET_PPC64)
3127 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
3128 #endif
3129 
3130 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
3131 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3132 {                                                                             \
3133     TCGv EA;                                                                  \
3134     chk(ctx);                                                                 \
3135     gen_set_access_type(ctx, ACCESS_INT);                                     \
3136     EA = tcg_temp_new();                                                      \
3137     gen_addr_reg_index(ctx, EA);                                              \
3138     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
3139 }
3140 
3141 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
3142     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3143 
3144 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
3145     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3146 
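/*
 * External PID (epx) forms: the access goes through the external-PID
 * translation context, hence the dedicated EPID MMU index below instead
 * of ctx->mem_idx, and CHK_SV restricts these insns to supervisor state.
 */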
3147 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
3148 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3149 {                                                                             \
3150     TCGv EA;                                                                  \
3151     CHK_SV(ctx);                                                              \
3152     gen_set_access_type(ctx, ACCESS_INT);                                     \
3153     EA = tcg_temp_new();                                                      \
3154     gen_addr_reg_index(ctx, EA);                                              \
3155     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3156 }
3157 
3158 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3159 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3160 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3161 #if defined(TARGET_PPC64)
3162 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
3163 #endif
3164 
3165 #if defined(TARGET_PPC64)
3166 /* CI load/store variants */
3167 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3168 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3169 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3170 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3171 #endif
3172 
3173 /***                              Integer store                            ***/
3174 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3175 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3176 {                                                                             \
3177     TCGv EA;                                                                  \
3178     chk(ctx);                                                                 \
3179     gen_set_access_type(ctx, ACCESS_INT);                                     \
3180     EA = tcg_temp_new();                                                      \
3181     gen_addr_reg_index(ctx, EA);                                              \
3182     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3183 }
3184 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3185     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3186 
3187 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3188     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3189 
3190 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3191 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3192 {                                                                             \
3193     TCGv EA;                                                                  \
3194     CHK_SV(ctx);                                                              \
3195     gen_set_access_type(ctx, ACCESS_INT);                                     \
3196     EA = tcg_temp_new();                                                      \
3197     gen_addr_reg_index(ctx, EA);                                              \
3198     tcg_gen_qemu_st_tl(                                                       \
3199         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3200 }
3201 
3202 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3203 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3204 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3205 #if defined(TARGET_PPC64)
3206 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3207 #endif
3208 
3209 #if defined(TARGET_PPC64)
3210 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3211 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3212 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3213 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3214 #endif
3215 /***                Integer load and store with byte reverse               ***/
3216 
3217 /* lhbrx */
3218 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3219 
3220 /* lwbrx */
3221 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3222 
3223 #if defined(TARGET_PPC64)
3224 /* ldbrx */
3225 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3226 /* stdbrx */
3227 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3228 #endif  /* TARGET_PPC64 */
3229 
3230 /* sthbrx */
3231 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3232 /* stwbrx */
3233 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3234 
3235 /***                    Integer load and store multiple                    ***/
3236 
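/*
 * lmw and stmw transfer GPRs rD..r31 (rS..r31 for stmw) to or from
 * consecutive words in memory.  They are not supported in little-endian
 * mode, where gen_align_no_le() raises an alignment exception instead.
 */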
3237 /* lmw */
3238 static void gen_lmw(DisasContext *ctx)
3239 {
3240     TCGv t0;
3241     TCGv_i32 t1;
3242 
3243     if (ctx->le_mode) {
3244         gen_align_no_le(ctx);
3245         return;
3246     }
3247     gen_set_access_type(ctx, ACCESS_INT);
3248     t0 = tcg_temp_new();
3249     t1 = tcg_constant_i32(rD(ctx->opcode));
3250     gen_addr_imm_index(ctx, t0, 0);
3251     gen_helper_lmw(cpu_env, t0, t1);
3252 }
3253 
3254 /* stmw */
3255 static void gen_stmw(DisasContext *ctx)
3256 {
3257     TCGv t0;
3258     TCGv_i32 t1;
3259 
3260     if (ctx->le_mode) {
3261         gen_align_no_le(ctx);
3262         return;
3263     }
3264     gen_set_access_type(ctx, ACCESS_INT);
3265     t0 = tcg_temp_new();
3266     t1 = tcg_constant_i32(rS(ctx->opcode));
3267     gen_addr_imm_index(ctx, t0, 0);
3268     gen_helper_stmw(cpu_env, t0, t1);
3269 }
3270 
3271 /***                    Integer load and store strings                     ***/
3272 
3273 /* lswi */
3274 /*
3275  * PowerPC32 specification says we must generate an exception if rA is
3276  * in the range of registers to be loaded.  On the other hand, IBM says
3277  * this is valid, but rA won't be loaded.  For now, I'll follow the
3278  * spec...
3279  */
3280 static void gen_lswi(DisasContext *ctx)
3281 {
3282     TCGv t0;
3283     TCGv_i32 t1, t2;
3284     int nb = NB(ctx->opcode);
3285     int start = rD(ctx->opcode);
3286     int ra = rA(ctx->opcode);
3287     int nr;
3288 
3289     if (ctx->le_mode) {
3290         gen_align_no_le(ctx);
3291         return;
3292     }
3293     if (nb == 0) {
3294         nb = 32;
3295     }
3296     nr = DIV_ROUND_UP(nb, 4);
3297     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3298         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3299         return;
3300     }
3301     gen_set_access_type(ctx, ACCESS_INT);
3302     t0 = tcg_temp_new();
3303     gen_addr_register(ctx, t0);
3304     t1 = tcg_constant_i32(nb);
3305     t2 = tcg_constant_i32(start);
3306     gen_helper_lsw(cpu_env, t0, t1, t2);
3307 }
3308 
3309 /* lswx */
3310 static void gen_lswx(DisasContext *ctx)
3311 {
3312     TCGv t0;
3313     TCGv_i32 t1, t2, t3;
3314 
3315     if (ctx->le_mode) {
3316         gen_align_no_le(ctx);
3317         return;
3318     }
3319     gen_set_access_type(ctx, ACCESS_INT);
3320     t0 = tcg_temp_new();
3321     gen_addr_reg_index(ctx, t0);
3322     t1 = tcg_constant_i32(rD(ctx->opcode));
3323     t2 = tcg_constant_i32(rA(ctx->opcode));
3324     t3 = tcg_constant_i32(rB(ctx->opcode));
3325     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3326 }
3327 
3328 /* stswi */
3329 static void gen_stswi(DisasContext *ctx)
3330 {
3331     TCGv t0;
3332     TCGv_i32 t1, t2;
3333     int nb = NB(ctx->opcode);
3334 
3335     if (ctx->le_mode) {
3336         gen_align_no_le(ctx);
3337         return;
3338     }
3339     gen_set_access_type(ctx, ACCESS_INT);
3340     t0 = tcg_temp_new();
3341     gen_addr_register(ctx, t0);
3342     if (nb == 0) {
3343         nb = 32;
3344     }
3345     t1 = tcg_constant_i32(nb);
3346     t2 = tcg_constant_i32(rS(ctx->opcode));
3347     gen_helper_stsw(cpu_env, t0, t1, t2);
3348 }
3349 
3350 /* stswx */
3351 static void gen_stswx(DisasContext *ctx)
3352 {
3353     TCGv t0;
3354     TCGv_i32 t1, t2;
3355 
3356     if (ctx->le_mode) {
3357         gen_align_no_le(ctx);
3358         return;
3359     }
3360     gen_set_access_type(ctx, ACCESS_INT);
3361     t0 = tcg_temp_new();
3362     gen_addr_reg_index(ctx, t0);
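    /* For stswx the byte count comes from the low 7 bits of XER. */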
3363     t1 = tcg_temp_new_i32();
3364     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3365     tcg_gen_andi_i32(t1, t1, 0x7F);
3366     t2 = tcg_constant_i32(rS(ctx->opcode));
3367     gen_helper_stsw(cpu_env, t0, t1, t2);
3368 }
3369 
3370 /***                        Memory synchronisation                         ***/
3371 /* eieio */
3372 static void gen_eieio(DisasContext *ctx)
3373 {
3374     TCGBar bar = TCG_MO_ALL;
3375 
3376     /*
3377      * eieio has complex semantics. It provides memory ordering between
3378      * operations in the set:
3379      * - loads from CI memory.
3380      * - stores to CI memory.
3381      * - stores to WT memory.
3382      *
3383      * It separately also orders memory for operations in the set:
3384      * - stores to cacheable memory.
3385      *
3386      * It also serializes instructions:
3387      * - dcbt and dcbst.
3388      *
3389      * It separately serializes:
3390      * - tlbie and tlbsync.
3391      *
3392      * And separately serializes:
3393      * - slbieg, slbiag, and slbsync.
3394      *
3395      * The end result is that CI memory ordering requires TCG_MO_ALL
3396      * and it is not possible to special-case more relaxed ordering for
3397      * cacheable accesses. TCG_BAR_SC is required to provide this
3398      * serialization.
3399      */
3400 
3401     /*
3402      * POWER9 has an eieio instruction variant using bit 6 as a hint to
3403      * tell the CPU it is a store-forwarding barrier.
3404      */
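    /*
     * Bit 6 in IBM bit numbering of the 32-bit opcode (bit 0 being the
     * MSB) corresponds to the mask 1 << (31 - 6), i.e. 0x02000000.
     */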
3405     if (ctx->opcode & 0x2000000) {
3406         /*
3407          * ISA says that "Reserved fields in instructions are ignored
3408          * by the processor". So ignore bit 6 on non-POWER9 CPUs, but
3409          * since this is not a form software should be using,
3410          * complain to the user.
3411          */
3412         if (!(ctx->insns_flags2 & PPC2_ISA300)) {
3413             qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
3414                           TARGET_FMT_lx "\n", ctx->cia);
3415         } else {
3416             bar = TCG_MO_ST_LD;
3417         }
3418     }
3419 
3420     tcg_gen_mb(bar | TCG_BAR_SC);
3421 }
3422 
3423 #if !defined(CONFIG_USER_ONLY)
3424 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3425 {
3426     TCGv_i32 t;
3427     TCGLabel *l;
3428 
3429     if (!ctx->lazy_tlb_flush) {
3430         return;
3431     }
3432     l = gen_new_label();
3433     t = tcg_temp_new_i32();
3434     tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3435     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3436     if (global) {
3437         gen_helper_check_tlb_flush_global(cpu_env);
3438     } else {
3439         gen_helper_check_tlb_flush_local(cpu_env);
3440     }
3441     gen_set_label(l);
3442 }
3443 #else
3444 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3445 #endif
3446 
3447 /* isync */
3448 static void gen_isync(DisasContext *ctx)
3449 {
3450     /*
3451      * We need to check for a pending TLB flush. This can only happen in
3452      * kernel mode, however, so check MSR_PR.
3453      */
3454     if (!ctx->pr) {
3455         gen_check_tlb_flush(ctx, false);
3456     }
3457     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
3458     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
3459 }
3460 
3461 #define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
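/*
 * MEMOP_GET_SIZE() turns the MO_SIZE field of a MemOp into the access
 * size in bytes: MO_UB -> 1, MO_UW -> 2, MO_UL -> 4, MO_UQ -> 8.
 */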
3462 
3463 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3464 {
3465     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3466     TCGv t0 = tcg_temp_new();
3467 
3468     gen_set_access_type(ctx, ACCESS_RES);
3469     gen_addr_reg_index(ctx, t0);
3470     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
3471     tcg_gen_mov_tl(cpu_reserve, t0);
3472     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3473     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3474 }
3475 
3476 #define LARX(name, memop)                  \
3477 static void gen_##name(DisasContext *ctx)  \
3478 {                                          \
3479     gen_load_locked(ctx, memop);           \
3480 }
3481 
3482 /* lwarx */
3483 LARX(lbarx, DEF_MEMOP(MO_UB))
3484 LARX(lharx, DEF_MEMOP(MO_UW))
3485 LARX(lwarx, DEF_MEMOP(MO_UL))
3486 
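/*
 * Common code for the "fetch and increment/decrement bounded/equal"
 * forms of lwat/ldat: load the value at EA and the adjacent value at
 * EA + size; when (value cond adjacent) holds, store value + addend
 * back and return the original value in RT, otherwise store the value
 * back unchanged and return 1 << (size * 8 - 1) in RT.
 */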
3487 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3488                                       TCGv EA, TCGCond cond, int addend)
3489 {
3490     TCGv t = tcg_temp_new();
3491     TCGv t2 = tcg_temp_new();
3492     TCGv u = tcg_temp_new();
3493 
3494     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3495     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3496     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3497     tcg_gen_addi_tl(u, t, addend);
3498 
3499     /* E.g. for fetch and increment bounded... */
3500     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3501     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3502     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3503 
3504     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3505     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3506     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3507 }
3508 
3509 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
3510 {
3511     uint32_t gpr_FC = FC(ctx->opcode);
3512     TCGv EA = tcg_temp_new();
3513     int rt = rD(ctx->opcode);
3514     bool need_serial;
3515     TCGv src, dst;
3516 
3517     gen_addr_register(ctx, EA);
3518     dst = cpu_gpr[rt];
3519     src = cpu_gpr[(rt + 1) & 31];
3520 
3521     need_serial = false;
3522     memop |= MO_ALIGN;
3523     switch (gpr_FC) {
3524     case 0: /* Fetch and add */
3525         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3526         break;
3527     case 1: /* Fetch and xor */
3528         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3529         break;
3530     case 2: /* Fetch and or */
3531         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3532         break;
3533     case 3: /* Fetch and 'and' */
3534         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3535         break;
3536     case 4:  /* Fetch and max unsigned */
3537         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3538         break;
3539     case 5:  /* Fetch and max signed */
3540         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3541         break;
3542     case 6:  /* Fetch and min unsigned */
3543         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3544         break;
3545     case 7:  /* Fetch and min signed */
3546         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3547         break;
3548     case 8: /* Swap */
3549         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3550         break;
3551 
3552     case 16: /* Compare and swap not equal */
3553         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3554             need_serial = true;
3555         } else {
3556             TCGv t0 = tcg_temp_new();
3557             TCGv t1 = tcg_temp_new();
3558 
3559             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3560             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3561                 tcg_gen_mov_tl(t1, src);
3562             } else {
3563                 tcg_gen_ext32u_tl(t1, src);
3564             }
3565             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3566                                cpu_gpr[(rt + 2) & 31], t0);
3567             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3568             tcg_gen_mov_tl(dst, t0);
3569         }
3570         break;
3571 
3572     case 24: /* Fetch and increment bounded */
3573         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3574             need_serial = true;
3575         } else {
3576             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3577         }
3578         break;
3579     case 25: /* Fetch and increment equal */
3580         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3581             need_serial = true;
3582         } else {
3583             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3584         }
3585         break;
3586     case 28: /* Fetch and decrement bounded */
3587         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3588             need_serial = true;
3589         } else {
3590             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3591         }
3592         break;
3593 
3594     default:
3595         /* invoke data storage error handler */
3596         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3597     }
3598 
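    /*
     * The forms above that operate on two values (compare and swap not
     * equal, fetch and increment/decrement) cannot be expressed as a
     * single host atomic operation, so under CF_PARALLEL they are
     * re-executed serially instead.
     */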
3599     if (need_serial) {
3600         /* Restart with exclusive lock.  */
3601         gen_helper_exit_atomic(cpu_env);
3602         ctx->base.is_jmp = DISAS_NORETURN;
3603     }
3604 }
3605 
3606 static void gen_lwat(DisasContext *ctx)
3607 {
3608     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3609 }
3610 
3611 #ifdef TARGET_PPC64
3612 static void gen_ldat(DisasContext *ctx)
3613 {
3614     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3615 }
3616 #endif
3617 
3618 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3619 {
3620     uint32_t gpr_FC = FC(ctx->opcode);
3621     TCGv EA = tcg_temp_new();
3622     TCGv src, discard;
3623 
3624     gen_addr_register(ctx, EA);
3625     src = cpu_gpr[rD(ctx->opcode)];
3626     discard = tcg_temp_new();
3627 
3628     memop |= MO_ALIGN;
3629     switch (gpr_FC) {
3630     case 0: /* add and Store */
3631         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3632         break;
3633     case 1: /* xor and Store */
3634         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3635         break;
3636     case 2: /* Or and Store */
3637         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3638         break;
3639     case 3: /* 'and' and Store */
3640         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3641         break;
3642     case 4:  /* Store max unsigned */
3643         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3644         break;
3645     case 5:  /* Store max signed */
3646         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3647         break;
3648     case 6:  /* Store min unsigned */
3649         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3650         break;
3651     case 7:  /* Store min signed */
3652         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3653         break;
3654     case 24: /* Store twin  */
3655         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3656             /* Restart with exclusive lock.  */
3657             gen_helper_exit_atomic(cpu_env);
3658             ctx->base.is_jmp = DISAS_NORETURN;
3659         } else {
3660             TCGv t = tcg_temp_new();
3661             TCGv t2 = tcg_temp_new();
3662             TCGv s = tcg_temp_new();
3663             TCGv s2 = tcg_temp_new();
3664             TCGv ea_plus_s = tcg_temp_new();
3665 
3666             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3667             tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
3668             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3669             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3670             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3671             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3672             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3673         }
3674         break;
3675     default:
3676         /* invoke data storage error handler */
3677         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3678     }
3679 }
3680 
3681 static void gen_stwat(DisasContext *ctx)
3682 {
3683     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3684 }
3685 
3686 #ifdef TARGET_PPC64
3687 static void gen_stdat(DisasContext *ctx)
3688 {
3689     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3690 }
3691 #endif
3692 
3693 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3694 {
3695     TCGLabel *l1 = gen_new_label();
3696     TCGLabel *l2 = gen_new_label();
3697     TCGv t0 = tcg_temp_new();
3698     int reg = rS(ctx->opcode);
3699 
3700     gen_set_access_type(ctx, ACCESS_RES);
3701     gen_addr_reg_index(ctx, t0);
3702     tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3703 
3704     t0 = tcg_temp_new();
3705     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3706                               cpu_gpr[reg], ctx->mem_idx,
3707                               DEF_MEMOP(memop) | MO_ALIGN);
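    /*
     * CR0 is set to 0b00 || EQ || SO: EQ is set when the cmpxchg
     * returned the reserved value, i.e. the store was performed; SO is
     * copied from XER.
     */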
3708     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3709     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3710     tcg_gen_or_tl(t0, t0, cpu_so);
3711     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3712     tcg_gen_br(l2);
3713 
3714     gen_set_label(l1);
3715 
3716     /*
3717      * Address mismatch implies failure.  But we still need to provide
3718      * the memory barrier semantics of the instruction.
3719      */
3720     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3721     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3722 
3723     gen_set_label(l2);
3724     tcg_gen_movi_tl(cpu_reserve, -1);
3725 }
3726 
3727 #define STCX(name, memop)                  \
3728 static void gen_##name(DisasContext *ctx)  \
3729 {                                          \
3730     gen_conditional_store(ctx, memop);     \
3731 }
3732 
3733 STCX(stbcx_, DEF_MEMOP(MO_UB))
3734 STCX(sthcx_, DEF_MEMOP(MO_UW))
3735 STCX(stwcx_, DEF_MEMOP(MO_UL))
3736 
3737 #if defined(TARGET_PPC64)
3738 /* ldarx */
3739 LARX(ldarx, DEF_MEMOP(MO_UQ))
3740 /* stdcx. */
3741 STCX(stdcx_, DEF_MEMOP(MO_UQ))
3742 
3743 /* lqarx */
3744 static void gen_lqarx(DisasContext *ctx)
3745 {
3746     int rd = rD(ctx->opcode);
3747     TCGv EA, hi, lo;
3748     TCGv_i128 t16;
3749 
3750     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3751                  (rd == rB(ctx->opcode)))) {
3752         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3753         return;
3754     }
3755 
3756     gen_set_access_type(ctx, ACCESS_RES);
3757     EA = tcg_temp_new();
3758     gen_addr_reg_index(ctx, EA);
3759 
3760     /* Note that the low part is always in RD+1, even in LE mode.  */
3761     lo = cpu_gpr[rd + 1];
3762     hi = cpu_gpr[rd];
3763 
3764     t16 = tcg_temp_new_i128();
3765     tcg_gen_qemu_ld_i128(t16, EA, ctx->mem_idx, DEF_MEMOP(MO_128 | MO_ALIGN));
3766     tcg_gen_extr_i128_i64(lo, hi, t16);
3767 
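    /*
     * Record the reservation address and both halves of the loaded
     * value so that stqcx. can check them later.
     */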
3768     tcg_gen_mov_tl(cpu_reserve, EA);
3769     tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
3770     tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
3771 }
3772 
3773 /* stqcx. */
3774 static void gen_stqcx_(DisasContext *ctx)
3775 {
3776     TCGLabel *lab_fail, *lab_over;
3777     int rs = rS(ctx->opcode);
3778     TCGv EA, t0, t1;
3779     TCGv_i128 cmp, val;
3780 
3781     if (unlikely(rs & 1)) {
3782         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3783         return;
3784     }
3785 
3786     lab_fail = gen_new_label();
3787     lab_over = gen_new_label();
3788 
3789     gen_set_access_type(ctx, ACCESS_RES);
3790     EA = tcg_temp_new();
3791     gen_addr_reg_index(ctx, EA);
3792 
3793     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
3794 
3795     cmp = tcg_temp_new_i128();
3796     val = tcg_temp_new_i128();
3797 
3798     tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
3799 
3800     /* Note that the low part is always in RS+1, even in LE mode.  */
3801     tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
3802 
3803     tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
3804                                 DEF_MEMOP(MO_128 | MO_ALIGN));
3805 
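    /*
     * The cmpxchg returns the previous memory contents; the store was
     * performed iff both halves match the reservation value.  Fold that
     * comparison into CR0.EQ, as for stdcx.
     */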
3806     t0 = tcg_temp_new();
3807     t1 = tcg_temp_new();
3808     tcg_gen_extr_i128_i64(t1, t0, val);
3809 
3810     tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
3811     tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
3812     tcg_gen_or_tl(t0, t0, t1);
3813 
3814     tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
3815     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3816     tcg_gen_or_tl(t0, t0, cpu_so);
3817     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3818 
3819     tcg_gen_br(lab_over);
3820     gen_set_label(lab_fail);
3821 
3822     /*
3823      * Address mismatch implies failure.  But we still need to provide
3824      * the memory barrier semantics of the instruction.
3825      */
3826     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3827     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3828 
3829     gen_set_label(lab_over);
3830     tcg_gen_movi_tl(cpu_reserve, -1);
3831 }
3832 #endif /* defined(TARGET_PPC64) */
3833 
3834 /* sync */
3835 static void gen_sync(DisasContext *ctx)
3836 {
3837     TCGBar bar = TCG_MO_ALL;
3838     uint32_t l = (ctx->opcode >> 21) & 3;
3839 
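    /*
     * l selects the variant: l == 1 is lwsync where PPC2_MEM_LWSYNC is
     * implemented, l == 2 is ptesync on 64-bit Book3S; otherwise a full
     * sync barrier is used.
     */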
3840     if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
3841         bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
3842     }
3843 
3844     /*
3845      * We may need to check for a pending TLB flush.
3846      *
3847      * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
3848      *
3849      * Additionally, this can only happen in kernel mode, so
3850      * check MSR_PR as well.
3851      */
3852     if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
3853         gen_check_tlb_flush(ctx, true);
3854     }
3855 
3856     tcg_gen_mb(bar | TCG_BAR_SC);
3857 }
3858 
3859 /* wait */
3860 static void gen_wait(DisasContext *ctx)
3861 {
3862     uint32_t wc;
3863 
3864     if (ctx->insns_flags & PPC_WAIT) {
3865         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
3866 
3867         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
3868             /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
3869             wc = WC(ctx->opcode);
3870         } else {
3871             wc = 0;
3872         }
3873 
3874     } else if (ctx->insns_flags2 & PPC2_ISA300) {
3875         /* v3.0 defines a new 'wait' encoding. */
3876         wc = WC(ctx->opcode);
3877         if (ctx->insns_flags2 & PPC2_ISA310) {
3878             uint32_t pl = PL(ctx->opcode);
3879 
3880             /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
3881             if (wc == 3) {
3882                 gen_invalid(ctx);
3883                 return;
3884             }
3885 
3886             /* PL 1-3 are reserved. If WC=2 then the insn is treated as a no-op. */
3887             if (pl > 0 && wc != 2) {
3888                 gen_invalid(ctx);
3889                 return;
3890             }
3891 
3892         } else { /* ISA300 */
3893             /* WC 1-3 are reserved */
3894             if (wc > 0) {
3895                 gen_invalid(ctx);
3896                 return;
3897             }
3898         }
3899 
3900     } else {
3901         warn_report("wait instruction decoded with wrong ISA flags.");
3902         gen_invalid(ctx);
3903         return;
3904     }
3905 
3906     /*
3907      * wait without WC field or with WC=0 waits for an exception / interrupt
3908      * to occur.
3909      */
3910     if (wc == 0) {
3911         TCGv_i32 t0 = tcg_constant_i32(1);
3912         tcg_gen_st_i32(t0, cpu_env,
3913                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3914         /* Stop translation, as the CPU is supposed to sleep from now */
3915         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3916     }
3917 
3918     /*
3919      * Other wait types must not just wait until an exception occurs because
3920      * ignoring their other wake-up conditions could cause a hang.
3921      *
3922      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
3923      * no-ops.
3924      *
3925      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
3926      *
3927      * wc=2 waits for an implementation-specific condition, which could be
3928      * always true, so it can be implemented as a no-op.
3929      *
3930      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
3931      *
3932      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
3933      * Reservation-loss may have implementation-specific conditions, so it
3934      * can be implemented as a no-op.
3935      *
3936      * wc=2 waits for an exception or an amount of time to pass. This
3937      * amount is implementation-specific so it can be implemented as a
3938      * no-op.
3939      *
3940      * ISA v3.1 allows for execution to resume "in the rare case of
3941      * an implementation-dependent event", so in any case software must
3942      * not depend on the architected resumption condition to become
3943      * true, so no-op implementations should be architecturally correct
3944      * (if suboptimal).
3945      */
3946 }
3947 
3948 #if defined(TARGET_PPC64)
3949 static void gen_doze(DisasContext *ctx)
3950 {
3951 #if defined(CONFIG_USER_ONLY)
3952     GEN_PRIV(ctx);
3953 #else
3954     TCGv_i32 t;
3955 
3956     CHK_HV(ctx);
3957     t = tcg_constant_i32(PPC_PM_DOZE);
3958     gen_helper_pminsn(cpu_env, t);
3959     /* Stop translation, as the CPU is supposed to sleep from now */
3960     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3961 #endif /* defined(CONFIG_USER_ONLY) */
3962 }
3963 
3964 static void gen_nap(DisasContext *ctx)
3965 {
3966 #if defined(CONFIG_USER_ONLY)
3967     GEN_PRIV(ctx);
3968 #else
3969     TCGv_i32 t;
3970 
3971     CHK_HV(ctx);
3972     t = tcg_constant_i32(PPC_PM_NAP);
3973     gen_helper_pminsn(cpu_env, t);
3974     /* Stop translation, as the CPU is supposed to sleep from now */
3975     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3976 #endif /* defined(CONFIG_USER_ONLY) */
3977 }
3978 
3979 static void gen_stop(DisasContext *ctx)
3980 {
3981 #if defined(CONFIG_USER_ONLY)
3982     GEN_PRIV(ctx);
3983 #else
3984     TCGv_i32 t;
3985 
3986     CHK_HV(ctx);
3987     t = tcg_constant_i32(PPC_PM_STOP);
3988     gen_helper_pminsn(cpu_env, t);
3989     /* Stop translation, as the CPU is supposed to sleep from now */
3990     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3991 #endif /* defined(CONFIG_USER_ONLY) */
3992 }
3993 
3994 static void gen_sleep(DisasContext *ctx)
3995 {
3996 #if defined(CONFIG_USER_ONLY)
3997     GEN_PRIV(ctx);
3998 #else
3999     TCGv_i32 t;
4000 
4001     CHK_HV(ctx);
4002     t = tcg_constant_i32(PPC_PM_SLEEP);
4003     gen_helper_pminsn(cpu_env, t);
4004     /* Stop translation, as the CPU is supposed to sleep from now */
4005     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4006 #endif /* defined(CONFIG_USER_ONLY) */
4007 }
4008 
4009 static void gen_rvwinkle(DisasContext *ctx)
4010 {
4011 #if defined(CONFIG_USER_ONLY)
4012     GEN_PRIV(ctx);
4013 #else
4014     TCGv_i32 t;
4015 
4016     CHK_HV(ctx);
4017     t = tcg_constant_i32(PPC_PM_RVWINKLE);
4018     gen_helper_pminsn(cpu_env, t);
4019     /* Stop translation, as the CPU is supposed to sleep from now */
4020     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4021 #endif /* defined(CONFIG_USER_ONLY) */
4022 }
4023 #endif /* #if defined(TARGET_PPC64) */
4024 
4025 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4026 {
4027 #if defined(TARGET_PPC64)
4028     if (ctx->has_cfar) {
4029         tcg_gen_movi_tl(cpu_cfar, nip);
4030     }
4031 #endif
4032 }
4033 
4034 #if defined(TARGET_PPC64)
4035 static void pmu_count_insns(DisasContext *ctx)
4036 {
4037     /*
4038      * Do not bother calling the helper if the PMU isn't counting
4039      * instructions.
4040      */
4041     if (!ctx->pmu_insn_cnt) {
4042         return;
4043     }
4044 
4045  #if !defined(CONFIG_USER_ONLY)
4046     TCGLabel *l;
4047     TCGv t0;
4048 
4049     /*
4050      * The PMU insns_inc() helper stops the internal PMU timer if a
4051      * counter overflow happens. In that case, if the guest is
4052      * running with icount and we do not handle it beforehand,
4053      * the helper can trigger a 'bad icount read'.
4054      */
4055     translator_io_start(&ctx->base);
4056 
4057     /* Avoid helper calls when only PMC5-6 are enabled. */
4058     if (!ctx->pmc_other) {
4059         l = gen_new_label();
4060         t0 = tcg_temp_new();
4061 
4062         gen_load_spr(t0, SPR_POWER_PMC5);
4063         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4064         gen_store_spr(SPR_POWER_PMC5, t0);
4065         /* Check for overflow, if it's enabled */
4066         if (ctx->mmcr0_pmcjce) {
4067             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
4068             gen_helper_handle_pmc5_overflow(cpu_env);
4069         }
4070 
4071         gen_set_label(l);
4072     } else {
4073         gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
4074     }
4075   #else
4076     /*
4077      * User mode can read (but not write) PMC5 and start/stop
4078      * the PMU via MMCR0_FC. In this case just increment
4079      * PMC5 with base.num_insns.
4080      */
4081     TCGv t0 = tcg_temp_new();
4082 
4083     gen_load_spr(t0, SPR_POWER_PMC5);
4084     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4085     gen_store_spr(SPR_POWER_PMC5, t0);
4086   #endif /* #if !defined(CONFIG_USER_ONLY) */
4087 }
4088 #else
4089 static void pmu_count_insns(DisasContext *ctx)
4090 {
4091     return;
4092 }
4093 #endif /* #if defined(TARGET_PPC64) */
4094 
4095 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4096 {
4097     return translator_use_goto_tb(&ctx->base, dest);
4098 }
4099 
4100 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4101 {
4102     if (unlikely(ctx->singlestep_enabled)) {
4103         gen_debug_exception(ctx);
4104     } else {
4105         /*
4106          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4107          * CF_NO_GOTO_PTR is set. Count insns now.
4108          */
4109         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4110             pmu_count_insns(ctx);
4111         }
4112 
4113         tcg_gen_lookup_and_goto_ptr();
4114     }
4115 }
4116 
4117 /***                                Branch                                 ***/
4118 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4119 {
4120     if (NARROW_MODE(ctx)) {
4121         dest = (uint32_t) dest;
4122     }
4123     if (use_goto_tb(ctx, dest)) {
4124         pmu_count_insns(ctx);
4125         tcg_gen_goto_tb(n);
4126         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4127         tcg_gen_exit_tb(ctx->base.tb, n);
4128     } else {
4129         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4130         gen_lookup_and_goto_ptr(ctx);
4131     }
4132 }
4133 
4134 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4135 {
4136     if (NARROW_MODE(ctx)) {
4137         nip = (uint32_t)nip;
4138     }
4139     tcg_gen_movi_tl(cpu_lr, nip);
4140 }
4141 
4142 /* b ba bl bla */
4143 static void gen_b(DisasContext *ctx)
4144 {
4145     target_ulong li, target;
4146 
4147     /* sign extend LI */
4148     li = LI(ctx->opcode);
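    /*
     * LI() yields the 26-bit branch displacement (two low bits already
     * zero); XOR-ing with 0x02000000 and subtracting it sign-extends
     * from bit 25, e.g. 0x03FFFFFC becomes -4.
     */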
4149     li = (li ^ 0x02000000) - 0x02000000;
4150     if (likely(AA(ctx->opcode) == 0)) {
4151         target = ctx->cia + li;
4152     } else {
4153         target = li;
4154     }
4155     if (LK(ctx->opcode)) {
4156         gen_setlr(ctx, ctx->base.pc_next);
4157     }
4158     gen_update_cfar(ctx, ctx->cia);
4159     gen_goto_tb(ctx, 0, target);
4160     ctx->base.is_jmp = DISAS_NORETURN;
4161 }
4162 
4163 #define BCOND_IM  0
4164 #define BCOND_LR  1
4165 #define BCOND_CTR 2
4166 #define BCOND_TAR 3
4167 
4168 static void gen_bcond(DisasContext *ctx, int type)
4169 {
4170     uint32_t bo = BO(ctx->opcode);
4171     TCGLabel *l1;
4172     TCGv target;
4173 
4174     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4175         target = tcg_temp_new();
4176         if (type == BCOND_CTR) {
4177             tcg_gen_mov_tl(target, cpu_ctr);
4178         } else if (type == BCOND_TAR) {
4179             gen_load_spr(target, SPR_TAR);
4180         } else {
4181             tcg_gen_mov_tl(target, cpu_lr);
4182         }
4183     } else {
4184         target = NULL;
4185     }
4186     if (LK(ctx->opcode)) {
4187         gen_setlr(ctx, ctx->base.pc_next);
4188     }
4189     l1 = gen_new_label();
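    /*
     * BO bits as tested below: 0x10 set skips the CR test, 0x8 gives the
     * CR bit value required for the branch to be taken, 0x4 set skips the
     * CTR decrement/test, and 0x2 selects branching on (decremented)
     * CTR == 0 rather than CTR != 0.  Jumps to l1 take the not-taken
     * (fall-through) path.
     */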
4190     if ((bo & 0x4) == 0) {
4191         /* Decrement and test CTR */
4192         TCGv temp = tcg_temp_new();
4193 
4194         if (type == BCOND_CTR) {
4195             /*
4196              * All ISAs up to v3 describe this form of bcctr as invalid, but
4197              * some processors, i.e. 64-bit server processors compliant with
4198              * arch 2.x, implement "test and decrement" logic instead,
4199              * as described in their respective UMs. This logic makes CTR
4200              * act as both the branch target and a counter, which makes
4201              * it basically useless and thus never used in real code.
4202              *
4203              * This form was hence chosen to trigger the extra micro-architectural
4204              * side-effect on real HW needed for the Spectre v2 workaround.
4205              * It is up to guests that implement such a workaround, i.e. Linux,
4206              * to use this form in a way that just triggers the side-effect
4207              * without doing anything else harmful.
4208              */
4209             if (unlikely(!is_book3s_arch2x(ctx))) {
4210                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4211                 return;
4212             }
4213 
4214             if (NARROW_MODE(ctx)) {
4215                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4216             } else {
4217                 tcg_gen_mov_tl(temp, cpu_ctr);
4218             }
4219             if (bo & 0x2) {
4220                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4221             } else {
4222                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4223             }
4224             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4225         } else {
4226             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4227             if (NARROW_MODE(ctx)) {
4228                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4229             } else {
4230                 tcg_gen_mov_tl(temp, cpu_ctr);
4231             }
4232             if (bo & 0x2) {
4233                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4234             } else {
4235                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4236             }
4237         }
4238     }
4239     if ((bo & 0x10) == 0) {
4240         /* Test CR */
4241         uint32_t bi = BI(ctx->opcode);
4242         uint32_t mask = 0x08 >> (bi & 0x03);
4243         TCGv_i32 temp = tcg_temp_new_i32();
4244 
4245         if (bo & 0x8) {
4246             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4247             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4248         } else {
4249             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4250             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4251         }
4252     }
4253     gen_update_cfar(ctx, ctx->cia);
4254     if (type == BCOND_IM) {
4255         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4256         if (likely(AA(ctx->opcode) == 0)) {
4257             gen_goto_tb(ctx, 0, ctx->cia + li);
4258         } else {
4259             gen_goto_tb(ctx, 0, li);
4260         }
4261     } else {
4262         if (NARROW_MODE(ctx)) {
4263             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4264         } else {
4265             tcg_gen_andi_tl(cpu_nip, target, ~3);
4266         }
4267         gen_lookup_and_goto_ptr(ctx);
4268     }
4269     if ((bo & 0x14) != 0x14) {
4270         /* fallthrough case */
4271         gen_set_label(l1);
4272         gen_goto_tb(ctx, 1, ctx->base.pc_next);
4273     }
4274     ctx->base.is_jmp = DISAS_NORETURN;
4275 }
4276 
4277 static void gen_bc(DisasContext *ctx)
4278 {
4279     gen_bcond(ctx, BCOND_IM);
4280 }
4281 
4282 static void gen_bcctr(DisasContext *ctx)
4283 {
4284     gen_bcond(ctx, BCOND_CTR);
4285 }
4286 
4287 static void gen_bclr(DisasContext *ctx)
4288 {
4289     gen_bcond(ctx, BCOND_LR);
4290 }
4291 
4292 static void gen_bctar(DisasContext *ctx)
4293 {
4294     gen_bcond(ctx, BCOND_TAR);
4295 }
4296 
4297 /***                      Condition register logical                       ***/
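/*
 * Each CR field is held as a 4-bit value in cpu_crf[], so a CR bit is
 * addressed by field (crbX >> 2) and by its position inside the field
 * (crbX & 3, counted from the most significant bit).  The shifts below
 * line the two source bits up with the destination bit before applying
 * the logical operation; the result is masked to the destination bit
 * and merged back into the destination field.
 */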
4298 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
4299 static void glue(gen_, name)(DisasContext *ctx)                               \
4300 {                                                                             \
4301     uint8_t bitmask;                                                          \
4302     int sh;                                                                   \
4303     TCGv_i32 t0, t1;                                                          \
4304     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
4305     t0 = tcg_temp_new_i32();                                                  \
4306     if (sh > 0)                                                               \
4307         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
4308     else if (sh < 0)                                                          \
4309         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
4310     else                                                                      \
4311         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
4312     t1 = tcg_temp_new_i32();                                                  \
4313     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
4314     if (sh > 0)                                                               \
4315         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
4316     else if (sh < 0)                                                          \
4317         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
4318     else                                                                      \
4319         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
4320     tcg_op(t0, t0, t1);                                                       \
4321     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
4322     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
4323     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
4324     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
4325 }
4326 
4327 /* crand */
4328 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4329 /* crandc */
4330 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4331 /* creqv */
4332 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4333 /* crnand */
4334 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4335 /* crnor */
4336 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4337 /* cror */
4338 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4339 /* crorc */
4340 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4341 /* crxor */
4342 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4343 
4344 /* mcrf */
4345 static void gen_mcrf(DisasContext *ctx)
4346 {
4347     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4348 }
4349 
4350 /***                           System linkage                              ***/
4351 
4352 /* rfi (supervisor only) */
4353 static void gen_rfi(DisasContext *ctx)
4354 {
4355 #if defined(CONFIG_USER_ONLY)
4356     GEN_PRIV(ctx);
4357 #else
4358     /*
4359      * This instruction doesn't exist anymore on 64-bit server
4360      * processors compliant with arch 2.x
4361      */
4362     if (is_book3s_arch2x(ctx)) {
4363         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4364         return;
4365     }
4366     /* Restore CPU state */
4367     CHK_SV(ctx);
4368     translator_io_start(&ctx->base);
4369     gen_update_cfar(ctx, ctx->cia);
4370     gen_helper_rfi(cpu_env);
4371     ctx->base.is_jmp = DISAS_EXIT;
4372 #endif
4373 }
4374 
4375 #if defined(TARGET_PPC64)
4376 static void gen_rfid(DisasContext *ctx)
4377 {
4378 #if defined(CONFIG_USER_ONLY)
4379     GEN_PRIV(ctx);
4380 #else
4381     /* Restore CPU state */
4382     CHK_SV(ctx);
4383     translator_io_start(&ctx->base);
4384     gen_update_cfar(ctx, ctx->cia);
4385     gen_helper_rfid(cpu_env);
4386     ctx->base.is_jmp = DISAS_EXIT;
4387 #endif
4388 }
4389 
4390 #if !defined(CONFIG_USER_ONLY)
4391 static void gen_rfscv(DisasContext *ctx)
4392 {
4393 #if defined(CONFIG_USER_ONLY)
4394     GEN_PRIV(ctx);
4395 #else
4396     /* Restore CPU state */
4397     CHK_SV(ctx);
4398     translator_io_start(&ctx->base);
4399     gen_update_cfar(ctx, ctx->cia);
4400     gen_helper_rfscv(cpu_env);
4401     ctx->base.is_jmp = DISAS_EXIT;
4402 #endif
4403 }
4404 #endif
4405 
4406 static void gen_hrfid(DisasContext *ctx)
4407 {
4408 #if defined(CONFIG_USER_ONLY)
4409     GEN_PRIV(ctx);
4410 #else
4411     /* Restore CPU state */
4412     CHK_HV(ctx);
4413     gen_helper_hrfid(cpu_env);
4414     ctx->base.is_jmp = DISAS_EXIT;
4415 #endif
4416 }
4417 #endif
4418 
4419 /* sc */
4420 #if defined(CONFIG_USER_ONLY)
4421 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4422 #else
4423 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4424 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4425 #endif
4426 static void gen_sc(DisasContext *ctx)
4427 {
4428     uint32_t lev;
4429 
4430     lev = (ctx->opcode >> 5) & 0x7F;
4431     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4432 }
4433 
4434 #if defined(TARGET_PPC64)
4435 #if !defined(CONFIG_USER_ONLY)
4436 static void gen_scv(DisasContext *ctx)
4437 {
4438     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4439 
4440     /* Set the PC back to the faulting instruction. */
4441     gen_update_nip(ctx, ctx->cia);
4442     gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4443 
4444     ctx->base.is_jmp = DISAS_NORETURN;
4445 }
4446 #endif
4447 #endif
4448 
4449 /***                                Trap                                   ***/
4450 
4451 /* Check for unconditional traps (always or never) */
4452 static bool check_unconditional_trap(DisasContext *ctx)
4453 {
4454     /* Trap never */
4455     if (TO(ctx->opcode) == 0) {
4456         return true;
4457     }
4458     /* Trap always */
4459     if (TO(ctx->opcode) == 31) {
4460         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4461         return true;
4462     }
4463     return false;
4464 }
4465 
4466 /* tw */
4467 static void gen_tw(DisasContext *ctx)
4468 {
4469     TCGv_i32 t0;
4470 
4471     if (check_unconditional_trap(ctx)) {
4472         return;
4473     }
4474     t0 = tcg_constant_i32(TO(ctx->opcode));
4475     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4476                   t0);
4477 }
4478 
4479 /* twi */
4480 static void gen_twi(DisasContext *ctx)
4481 {
4482     TCGv t0;
4483     TCGv_i32 t1;
4484 
4485     if (check_unconditional_trap(ctx)) {
4486         return;
4487     }
4488     t0 = tcg_constant_tl(SIMM(ctx->opcode));
4489     t1 = tcg_constant_i32(TO(ctx->opcode));
4490     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4491 }
4492 
4493 #if defined(TARGET_PPC64)
4494 /* td */
4495 static void gen_td(DisasContext *ctx)
4496 {
4497     TCGv_i32 t0;
4498 
4499     if (check_unconditional_trap(ctx)) {
4500         return;
4501     }
4502     t0 = tcg_constant_i32(TO(ctx->opcode));
4503     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4504                   t0);
4505 }
4506 
4507 /* tdi */
4508 static void gen_tdi(DisasContext *ctx)
4509 {
4510     TCGv t0;
4511     TCGv_i32 t1;
4512 
4513     if (check_unconditional_trap(ctx)) {
4514         return;
4515     }
4516     t0 = tcg_constant_tl(SIMM(ctx->opcode));
4517     t1 = tcg_constant_i32(TO(ctx->opcode));
4518     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4519 }
4520 #endif
4521 
4522 /***                          Processor control                            ***/
4523 
4524 /* mcrxr */
4525 static void gen_mcrxr(DisasContext *ctx)
4526 {
4527     TCGv_i32 t0 = tcg_temp_new_i32();
4528     TCGv_i32 t1 = tcg_temp_new_i32();
4529     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4530 
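    /*
     * The target CR field receives SO || OV || CA || 0 (SO in the most
     * significant bit); the source XER bits are then cleared.
     */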
4531     tcg_gen_trunc_tl_i32(t0, cpu_so);
4532     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4533     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4534     tcg_gen_shli_i32(t0, t0, 3);
4535     tcg_gen_shli_i32(t1, t1, 2);
4536     tcg_gen_shli_i32(dst, dst, 1);
4537     tcg_gen_or_i32(dst, dst, t0);
4538     tcg_gen_or_i32(dst, dst, t1);
4539 
4540     tcg_gen_movi_tl(cpu_so, 0);
4541     tcg_gen_movi_tl(cpu_ov, 0);
4542     tcg_gen_movi_tl(cpu_ca, 0);
4543 }
4544 
4545 #ifdef TARGET_PPC64
4546 /* mcrxrx */
4547 static void gen_mcrxrx(DisasContext *ctx)
4548 {
4549     TCGv t0 = tcg_temp_new();
4550     TCGv t1 = tcg_temp_new();
4551     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4552 
4553     /* copy OV and OV32 */
4554     tcg_gen_shli_tl(t0, cpu_ov, 1);
4555     tcg_gen_or_tl(t0, t0, cpu_ov32);
4556     tcg_gen_shli_tl(t0, t0, 2);
4557     /* copy CA and CA32 */
4558     tcg_gen_shli_tl(t1, cpu_ca, 1);
4559     tcg_gen_or_tl(t1, t1, cpu_ca32);
4560     tcg_gen_or_tl(t0, t0, t1);
4561     tcg_gen_trunc_tl_i32(dst, t0);
4562 }
4563 #endif
4564 
4565 /* mfcr mfocrf */
4566 static void gen_mfcr(DisasContext *ctx)
4567 {
4568     uint32_t crm, crn;
4569 
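    /*
     * Opcode bit 0x00100000 distinguishes mfocrf from plain mfcr.  For
     * mfocrf, 'crm & (crm - 1)' is zero only when exactly one CRM bit is
     * set; the selected CR field is then copied into its nibble of RD
     * and the rest of RD is cleared.
     */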
4570     if (likely(ctx->opcode & 0x00100000)) {
4571         crm = CRM(ctx->opcode);
4572         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4573             crn = ctz32(crm);
4574             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4575             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4576                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4577         }
4578     } else {
4579         TCGv_i32 t0 = tcg_temp_new_i32();
4580         tcg_gen_mov_i32(t0, cpu_crf[0]);
4581         tcg_gen_shli_i32(t0, t0, 4);
4582         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4583         tcg_gen_shli_i32(t0, t0, 4);
4584         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4585         tcg_gen_shli_i32(t0, t0, 4);
4586         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4587         tcg_gen_shli_i32(t0, t0, 4);
4588         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4589         tcg_gen_shli_i32(t0, t0, 4);
4590         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4591         tcg_gen_shli_i32(t0, t0, 4);
4592         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4593         tcg_gen_shli_i32(t0, t0, 4);
4594         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4595         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4596     }
4597 }
4598 
4599 /* mfmsr */
4600 static void gen_mfmsr(DisasContext *ctx)
4601 {
4602     CHK_SV(ctx);
4603     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4604 }
4605 
4606 /* mfspr */
4607 static inline void gen_op_mfspr(DisasContext *ctx)
4608 {
4609     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4610     uint32_t sprn = SPR(ctx->opcode);
4611 
4612 #if defined(CONFIG_USER_ONLY)
4613     read_cb = ctx->spr_cb[sprn].uea_read;
4614 #else
4615     if (ctx->pr) {
4616         read_cb = ctx->spr_cb[sprn].uea_read;
4617     } else if (ctx->hv) {
4618         read_cb = ctx->spr_cb[sprn].hea_read;
4619     } else {
4620         read_cb = ctx->spr_cb[sprn].oea_read;
4621     }
4622 #endif
4623     if (likely(read_cb != NULL)) {
4624         if (likely(read_cb != SPR_NOACCESS)) {
4625             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4626         } else {
4627             /* Privilege exception */
4628             /*
4629              * This is a hack to avoid warnings when running Linux:
4630              * this OS breaks the PowerPC virtualisation model,
4631              * allowing userland applications to read the PVR
4632              */
4633             if (sprn != SPR_PVR) {
4634                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4635                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4636                               ctx->cia);
4637             }
4638             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4639         }
4640     } else {
4641         /* ISA 2.07 defines these as no-ops */
4642         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4643             (sprn >= 808 && sprn <= 811)) {
4644             /* This is a nop */
4645             return;
4646         }
4647         /* Not defined */
4648         qemu_log_mask(LOG_GUEST_ERROR,
4649                       "Trying to read invalid spr %d (0x%03x) at "
4650                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4651 
4652         /*
4653          * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4654          * generate a privilege exception, an HV emulation assist, or a no-op.
4655          */
4656         if (sprn & 0x10) {
4657             if (ctx->pr) {
4658                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4659             }
4660         } else {
4661             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4662                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4663             }
4664         }
4665     }
4666 }
4667 
4668 static void gen_mfspr(DisasContext *ctx)
4669 {
4670     gen_op_mfspr(ctx);
4671 }
4672 
4673 /* mftb */
4674 static void gen_mftb(DisasContext *ctx)
4675 {
4676     gen_op_mfspr(ctx);
4677 }
4678 
4679 /* mtcrf mtocrf*/
4680 static void gen_mtcrf(DisasContext *ctx)
4681 {
4682     uint32_t crm, crn;
4683 
4684     crm = CRM(ctx->opcode);
4685     if (likely((ctx->opcode & 0x00100000))) {
4686         if (crm && ((crm & (crm - 1)) == 0)) {
4687             TCGv_i32 temp = tcg_temp_new_i32();
4688             crn = ctz32(crm);
4689             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4690             tcg_gen_shri_i32(temp, temp, crn * 4);
4691             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4692         }
4693     } else {
4694         TCGv_i32 temp = tcg_temp_new_i32();
4695         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4696         for (crn = 0 ; crn < 8 ; crn++) {
4697             if (crm & (1 << crn)) {
4698                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4699                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4700             }
4701         }
4702     }
4703 }
4704 
4705 /* mtmsr */
4706 #if defined(TARGET_PPC64)
4707 static void gen_mtmsrd(DisasContext *ctx)
4708 {
4709     if (unlikely(!is_book3s_arch2x(ctx))) {
4710         gen_invalid(ctx);
4711         return;
4712     }
4713 
4714     CHK_SV(ctx);
4715 
4716 #if !defined(CONFIG_USER_ONLY)
4717     TCGv t0, t1;
4718     target_ulong mask;
4719 
4720     t0 = tcg_temp_new();
4721     t1 = tcg_temp_new();
4722 
4723     translator_io_start(&ctx->base);
4724 
4725     if (ctx->opcode & 0x00010000) {
4726         /* L=1 form only updates EE and RI */
4727         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4728     } else {
4729         /* mtmsrd does not alter HV, S, ME, or LE */
4730         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4731                  (1ULL << MSR_HV));
4732         /*
4733          * XXX: we need to update nip before the store because, if we
4734          *      enter power saving mode, we will exit the loop directly
4735          *      from ppc_store_msr.
4736          */
4737         gen_update_nip(ctx, ctx->base.pc_next);
4738     }
4739 
4740     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4741     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4742     tcg_gen_or_tl(t0, t0, t1);
4743 
4744     gen_helper_store_msr(cpu_env, t0);
4745 
4746     /* Must stop the translation as machine state (may have) changed */
4747     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4748 #endif /* !defined(CONFIG_USER_ONLY) */
4749 }
4750 #endif /* defined(TARGET_PPC64) */
4751 
4752 static void gen_mtmsr(DisasContext *ctx)
4753 {
4754     CHK_SV(ctx);
4755 
4756 #if !defined(CONFIG_USER_ONLY)
4757     TCGv t0, t1;
4758     target_ulong mask = 0xFFFFFFFF;
4759 
4760     t0 = tcg_temp_new();
4761     t1 = tcg_temp_new();
4762 
4763     translator_io_start(&ctx->base);
4764     if (ctx->opcode & 0x00010000) {
4765         /* L=1 form only updates EE and RI */
4766         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4767     } else {
4768         /* mtmsr does not alter S, ME, or LE */
4769         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4770 
4771         /*
4772          * XXX: we need to update nip before the store because, if we
4773          *      enter power saving mode, we will exit the loop directly
4774          *      from ppc_store_msr
4775          */
4776         gen_update_nip(ctx, ctx->base.pc_next);
4777     }
4778 
4779     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4780     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4781     tcg_gen_or_tl(t0, t0, t1);
4782 
4783     gen_helper_store_msr(cpu_env, t0);
4784 
4785     /* Must stop the translation as machine state (may have) changed */
4786     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4787 #endif
4788 }
4789 
4790 /* mtspr */
4791 static void gen_mtspr(DisasContext *ctx)
4792 {
4793     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4794     uint32_t sprn = SPR(ctx->opcode);
4795 
4796 #if defined(CONFIG_USER_ONLY)
4797     write_cb = ctx->spr_cb[sprn].uea_write;
4798 #else
4799     if (ctx->pr) {
4800         write_cb = ctx->spr_cb[sprn].uea_write;
4801     } else if (ctx->hv) {
4802         write_cb = ctx->spr_cb[sprn].hea_write;
4803     } else {
4804         write_cb = ctx->spr_cb[sprn].oea_write;
4805     }
4806 #endif
4807     if (likely(write_cb != NULL)) {
4808         if (likely(write_cb != SPR_NOACCESS)) {
4809             (*write_cb)(ctx, sprn, rS(ctx->opcode));
4810         } else {
4811             /* Privilege exception */
4812             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4813                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4814                           ctx->cia);
4815             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4816         }
4817     } else {
4818         /* ISA 2.07 defines these as no-ops */
4819         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4820             (sprn >= 808 && sprn <= 811)) {
4821             /* This is a nop */
4822             return;
4823         }
4824 
4825         /* Not defined */
4826         qemu_log_mask(LOG_GUEST_ERROR,
4827                       "Trying to write invalid spr %d (0x%03x) at "
4828                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4829 
4830 
4831         /*
4832          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
4833          * generate a priv exception, a hv emu exception or a no-op
4834          */
4835         if (sprn & 0x10) {
4836             if (ctx->pr) {
4837                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4838             }
4839         } else {
4840             if (ctx->pr || sprn == 0) {
4841                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4842             }
4843         }
4844     }
4845 }
4846 
4847 #if defined(TARGET_PPC64)
4848 /* setb */
4849 static void gen_setb(DisasContext *ctx)
4850 {
4851     TCGv_i32 t0 = tcg_temp_new_i32();
4852     TCGv_i32 t8 = tcg_constant_i32(8);
4853     TCGv_i32 tm1 = tcg_constant_i32(-1);
4854     int crf = crfS(ctx->opcode);
4855 
4856     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4857     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4858     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4859 }
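/*
 * In the sequence above the CR field is a 4-bit value with LT as bit 3
 * and GT as bit 2: crf >= 8 means LT is set (RT = -1), otherwise
 * crf >= 4 means GT is set (RT = 1), and anything else gives RT = 0.
 */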
4860 #endif
4861 
4862 /***                         Cache management                              ***/
4863 
4864 /* dcbf */
4865 static void gen_dcbf(DisasContext *ctx)
4866 {
4867     /* XXX: specification says this is treated as a load by the MMU */
4868     TCGv t0;
4869     gen_set_access_type(ctx, ACCESS_CACHE);
4870     t0 = tcg_temp_new();
4871     gen_addr_reg_index(ctx, t0);
4872     gen_qemu_ld8u(ctx, t0, t0);
4873 }
4874 
4875 /* dcbfep (external PID dcbf) */
4876 static void gen_dcbfep(DisasContext *ctx)
4877 {
4878     /* XXX: specification says this is treated as a load by the MMU */
4879     TCGv t0;
4880     CHK_SV(ctx);
4881     gen_set_access_type(ctx, ACCESS_CACHE);
4882     t0 = tcg_temp_new();
4883     gen_addr_reg_index(ctx, t0);
4884     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4885 }
4886 
4887 /* dcbi (Supervisor only) */
4888 static void gen_dcbi(DisasContext *ctx)
4889 {
4890 #if defined(CONFIG_USER_ONLY)
4891     GEN_PRIV(ctx);
4892 #else
4893     TCGv EA, val;
4894 
4895     CHK_SV(ctx);
4896     EA = tcg_temp_new();
4897     gen_set_access_type(ctx, ACCESS_CACHE);
4898     gen_addr_reg_index(ctx, EA);
4899     val = tcg_temp_new();
4900     /* XXX: specification says this should be treated as a store by the MMU */
4901     gen_qemu_ld8u(ctx, val, EA);
4902     gen_qemu_st8(ctx, val, EA);
4903 #endif /* defined(CONFIG_USER_ONLY) */
4904 }
4905 
4906 /* dcbst */
4907 static void gen_dcbst(DisasContext *ctx)
4908 {
4909     /* XXX: specification says this is treated as a load by the MMU */
4910     TCGv t0;
4911     gen_set_access_type(ctx, ACCESS_CACHE);
4912     t0 = tcg_temp_new();
4913     gen_addr_reg_index(ctx, t0);
4914     gen_qemu_ld8u(ctx, t0, t0);
4915 }
4916 
4917 /* dcbstep (external PID dcbst) */
4918 static void gen_dcbstep(DisasContext *ctx)
4919 {
4920     /* XXX: specification says this is treated as a load by the MMU */
4921     TCGv t0;
4922     gen_set_access_type(ctx, ACCESS_CACHE);
4923     t0 = tcg_temp_new();
4924     gen_addr_reg_index(ctx, t0);
4925     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4926 }
4927 
4928 /* dcbt */
4929 static void gen_dcbt(DisasContext *ctx)
4930 {
4931     /*
4932      * interpreted as no-op
4933      * XXX: specification says this is treated as a load by the MMU but
4934      *      does not generate any exception
4935      */
4936 }
4937 
4938 /* dcbtep */
4939 static void gen_dcbtep(DisasContext *ctx)
4940 {
4941     /*
4942      * interpreted as no-op
4943      * XXX: specification says this is treated as a load by the MMU but
4944      *      does not generate any exception
4945      */
4946 }
4947 
4948 /* dcbtst */
4949 static void gen_dcbtst(DisasContext *ctx)
4950 {
4951     /*
4952      * interpreted as no-op
4953      * XXX: specification says this is treated as a load by the MMU but
4954      *      does not generate any exception
4955      */
4956 }
4957 
4958 /* dcbtstep */
4959 static void gen_dcbtstep(DisasContext *ctx)
4960 {
4961     /*
4962      * interpreted as no-op
4963      * XXX: specification says this is treated as a load by the MMU but
4964      *      does not generate any exception
4965      */
4966 }
4967 
4968 /* dcbtls */
4969 static void gen_dcbtls(DisasContext *ctx)
4970 {
4971     /* Always fails locking the cache */
4972     TCGv t0 = tcg_temp_new();
4973     gen_load_spr(t0, SPR_Exxx_L1CSR0);
4974     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4975     gen_store_spr(SPR_Exxx_L1CSR0, t0);
4976 }
4977 
4978 /* dcblc */
4979 static void gen_dcblc(DisasContext *ctx)
4980 {
4981     /*
4982      * interpreted as no-op
4983      */
4984 }
4985 
4986 /* dcbz */
4987 static void gen_dcbz(DisasContext *ctx)
4988 {
4989     TCGv tcgv_addr;
4990     TCGv_i32 tcgv_op;
4991 
4992     gen_set_access_type(ctx, ACCESS_CACHE);
4993     tcgv_addr = tcg_temp_new();
4994     tcgv_op = tcg_constant_i32(ctx->opcode & 0x03FF000);
4995     gen_addr_reg_index(ctx, tcgv_addr);
4996     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
4997 }
4998 
4999 /* dcbzep */
5000 static void gen_dcbzep(DisasContext *ctx)
5001 {
5002     TCGv tcgv_addr;
5003     TCGv_i32 tcgv_op;
5004 
5005     gen_set_access_type(ctx, ACCESS_CACHE);
5006     tcgv_addr = tcg_temp_new();
5007     tcgv_op = tcg_constant_i32(ctx->opcode & 0x03FF000);
5008     gen_addr_reg_index(ctx, tcgv_addr);
5009     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5010 }
5011 
5012 /* dst / dstt */
5013 static void gen_dst(DisasContext *ctx)
5014 {
5015     if (rA(ctx->opcode) == 0) {
5016         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5017     } else {
5018         /* interpreted as no-op */
5019     }
5020 }
5021 
5022 /* dstst / dststt */
5023 static void gen_dstst(DisasContext *ctx)
5024 {
5025     if (rA(ctx->opcode) == 0) {
5026         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5027     } else {
5028         /* interpreted as no-op */
5029     }
5030 
5031 }
5032 
5033 /* dss / dssall */
5034 static void gen_dss(DisasContext *ctx)
5035 {
5036     /* interpreted as no-op */
5037 }
5038 
5039 /* icbi */
5040 static void gen_icbi(DisasContext *ctx)
5041 {
5042     TCGv t0;
5043     gen_set_access_type(ctx, ACCESS_CACHE);
5044     t0 = tcg_temp_new();
5045     gen_addr_reg_index(ctx, t0);
5046     gen_helper_icbi(cpu_env, t0);
5047 }
5048 
5049 /* icbiep */
5050 static void gen_icbiep(DisasContext *ctx)
5051 {
5052     TCGv t0;
5053     gen_set_access_type(ctx, ACCESS_CACHE);
5054     t0 = tcg_temp_new();
5055     gen_addr_reg_index(ctx, t0);
5056     gen_helper_icbiep(cpu_env, t0);
5057 }
5058 
5059 /* Optional: */
5060 /* dcba */
5061 static void gen_dcba(DisasContext *ctx)
5062 {
5063     /*
5064      * interpreted as no-op
5065      * XXX: specification says this is treated as a store by the MMU
5066      *      but does not generate any exception
5067      */
5068 }
5069 
5070 /***                    Segment register manipulation                      ***/
5071 /* Supervisor only: */
5072 
5073 /* mfsr */
5074 static void gen_mfsr(DisasContext *ctx)
5075 {
5076 #if defined(CONFIG_USER_ONLY)
5077     GEN_PRIV(ctx);
5078 #else
5079     TCGv t0;
5080 
5081     CHK_SV(ctx);
5082     t0 = tcg_constant_tl(SR(ctx->opcode));
5083     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5084 #endif /* defined(CONFIG_USER_ONLY) */
5085 }
5086 
5087 /* mfsrin */
5088 static void gen_mfsrin(DisasContext *ctx)
5089 {
5090 #if defined(CONFIG_USER_ONLY)
5091     GEN_PRIV(ctx);
5092 #else
5093     TCGv t0;
5094 
5095     CHK_SV(ctx);
5096     t0 = tcg_temp_new();
5097     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5098     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5099 #endif /* defined(CONFIG_USER_ONLY) */
5100 }
5101 
5102 /* mtsr */
5103 static void gen_mtsr(DisasContext *ctx)
5104 {
5105 #if defined(CONFIG_USER_ONLY)
5106     GEN_PRIV(ctx);
5107 #else
5108     TCGv t0;
5109 
5110     CHK_SV(ctx);
5111     t0 = tcg_constant_tl(SR(ctx->opcode));
5112     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5113 #endif /* defined(CONFIG_USER_ONLY) */
5114 }
5115 
5116 /* mtsrin */
5117 static void gen_mtsrin(DisasContext *ctx)
5118 {
5119 #if defined(CONFIG_USER_ONLY)
5120     GEN_PRIV(ctx);
5121 #else
5122     TCGv t0;
5123     CHK_SV(ctx);
5124 
5125     t0 = tcg_temp_new();
5126     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5127     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5128 #endif /* defined(CONFIG_USER_ONLY) */
5129 }
5130 
5131 #if defined(TARGET_PPC64)
5132 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5133 
5134 /* mfsr */
5135 static void gen_mfsr_64b(DisasContext *ctx)
5136 {
5137 #if defined(CONFIG_USER_ONLY)
5138     GEN_PRIV(ctx);
5139 #else
5140     TCGv t0;
5141 
5142     CHK_SV(ctx);
5143     t0 = tcg_constant_tl(SR(ctx->opcode));
5144     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5145 #endif /* defined(CONFIG_USER_ONLY) */
5146 }
5147 
5148 /* mfsrin */
5149 static void gen_mfsrin_64b(DisasContext *ctx)
5150 {
5151 #if defined(CONFIG_USER_ONLY)
5152     GEN_PRIV(ctx);
5153 #else
5154     TCGv t0;
5155 
5156     CHK_SV(ctx);
5157     t0 = tcg_temp_new();
5158     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5159     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5160 #endif /* defined(CONFIG_USER_ONLY) */
5161 }
5162 
5163 /* mtsr */
5164 static void gen_mtsr_64b(DisasContext *ctx)
5165 {
5166 #if defined(CONFIG_USER_ONLY)
5167     GEN_PRIV(ctx);
5168 #else
5169     TCGv t0;
5170 
5171     CHK_SV(ctx);
5172     t0 = tcg_constant_tl(SR(ctx->opcode));
5173     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5174 #endif /* defined(CONFIG_USER_ONLY) */
5175 }
5176 
5177 /* mtsrin */
5178 static void gen_mtsrin_64b(DisasContext *ctx)
5179 {
5180 #if defined(CONFIG_USER_ONLY)
5181     GEN_PRIV(ctx);
5182 #else
5183     TCGv t0;
5184 
5185     CHK_SV(ctx);
5186     t0 = tcg_temp_new();
5187     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5188     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5189 #endif /* defined(CONFIG_USER_ONLY) */
5190 }
5191 
5192 #endif /* defined(TARGET_PPC64) */
5193 
5194 /***                      Lookaside buffer management                      ***/
5195 /* Optional & supervisor only: */
5196 
5197 /* tlbia */
5198 static void gen_tlbia(DisasContext *ctx)
5199 {
5200 #if defined(CONFIG_USER_ONLY)
5201     GEN_PRIV(ctx);
5202 #else
5203     CHK_HV(ctx);
5204 
5205     gen_helper_tlbia(cpu_env);
5206 #endif  /* defined(CONFIG_USER_ONLY) */
5207 }
5208 
5209 /* tlbsync */
5210 static void gen_tlbsync(DisasContext *ctx)
5211 {
5212 #if defined(CONFIG_USER_ONLY)
5213     GEN_PRIV(ctx);
5214 #else
5215 
5216     if (ctx->gtse) {
5217         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
5218     } else {
5219         CHK_HV(ctx); /* Else hypervisor privileged */
5220     }
5221 
5222     /* BookS does both ptesync and tlbsync, so tlbsync is a nop for server */
5223     if (ctx->insns_flags & PPC_BOOKE) {
5224         gen_check_tlb_flush(ctx, true);
5225     }
5226 #endif /* defined(CONFIG_USER_ONLY) */
5227 }
5228 
5229 /***                              External control                         ***/
5230 /* Optional: */
5231 
5232 /* eciwx */
5233 static void gen_eciwx(DisasContext *ctx)
5234 {
5235     TCGv t0;
5236     /* Should check EAR[E] ! */
5237     gen_set_access_type(ctx, ACCESS_EXT);
5238     t0 = tcg_temp_new();
5239     gen_addr_reg_index(ctx, t0);
5240     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5241                        DEF_MEMOP(MO_UL | MO_ALIGN));
5242 }
5243 
5244 /* ecowx */
5245 static void gen_ecowx(DisasContext *ctx)
5246 {
5247     TCGv t0;
5248     /* Should check EAR[E] ! */
5249     gen_set_access_type(ctx, ACCESS_EXT);
5250     t0 = tcg_temp_new();
5251     gen_addr_reg_index(ctx, t0);
5252     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5253                        DEF_MEMOP(MO_UL | MO_ALIGN));
5254 }
5255 
5256 /* 602 - 603 - G2 TLB management */
5257 
5258 /* tlbld */
5259 static void gen_tlbld_6xx(DisasContext *ctx)
5260 {
5261 #if defined(CONFIG_USER_ONLY)
5262     GEN_PRIV(ctx);
5263 #else
5264     CHK_SV(ctx);
5265     gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5266 #endif /* defined(CONFIG_USER_ONLY) */
5267 }
5268 
5269 /* tlbli */
5270 static void gen_tlbli_6xx(DisasContext *ctx)
5271 {
5272 #if defined(CONFIG_USER_ONLY)
5273     GEN_PRIV(ctx);
5274 #else
5275     CHK_SV(ctx);
5276     gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5277 #endif /* defined(CONFIG_USER_ONLY) */
5278 }
5279 
5280 /* BookE specific instructions */
5281 
5282 /* XXX: not implemented on 440? */
5283 static void gen_mfapidi(DisasContext *ctx)
5284 {
5285     /* XXX: TODO */
5286     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5287 }
5288 
5289 /* XXX: not implemented on 440? */
5290 static void gen_tlbiva(DisasContext *ctx)
5291 {
5292 #if defined(CONFIG_USER_ONLY)
5293     GEN_PRIV(ctx);
5294 #else
5295     TCGv t0;
5296 
5297     CHK_SV(ctx);
5298     t0 = tcg_temp_new();
5299     gen_addr_reg_index(ctx, t0);
5300     gen_helper_tlbiva(cpu_env, t0);
5301 #endif /* defined(CONFIG_USER_ONLY) */
5302 }
5303 
5304 /* All 405 MAC instructions are translated here */
5305 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5306                                         int ra, int rb, int rt, int Rc)
5307 {
5308     TCGv t0, t1;
5309 
5310     t0 = tcg_temp_new();
5311     t1 = tcg_temp_new();
5312 
5313     switch (opc3 & 0x0D) {
5314     case 0x05:
5315         /* macchw    - macchw.    - macchwo   - macchwo.   */
5316         /* macchws   - macchws.   - macchwso  - macchwso.  */
5317         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5318         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5319         /* mulchw - mulchw. */
5320         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5321         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5322         tcg_gen_ext16s_tl(t1, t1);
5323         break;
5324     case 0x04:
5325         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5326         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5327         /* mulchwu - mulchwu. */
5328         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5329         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5330         tcg_gen_ext16u_tl(t1, t1);
5331         break;
5332     case 0x01:
5333         /* machhw    - machhw.    - machhwo   - machhwo.   */
5334         /* machhws   - machhws.   - machhwso  - machhwso.  */
5335         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5336         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5337         /* mulhhw - mulhhw. */
5338         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5339         tcg_gen_ext16s_tl(t0, t0);
5340         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5341         tcg_gen_ext16s_tl(t1, t1);
5342         break;
5343     case 0x00:
5344         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5345         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5346         /* mulhhwu - mulhhwu. */
5347         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5348         tcg_gen_ext16u_tl(t0, t0);
5349         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5350         tcg_gen_ext16u_tl(t1, t1);
5351         break;
5352     case 0x0D:
5353         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5354         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5355         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5356         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5357         /* mullhw - mullhw. */
5358         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5359         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5360         break;
5361     case 0x0C:
5362         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5363         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5364         /* mullhwu - mullhwu. */
5365         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5366         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5367         break;
5368     }
5369     if (opc2 & 0x04) {
5370         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5371         tcg_gen_mul_tl(t1, t0, t1);
5372         if (opc2 & 0x02) {
5373             /* nmultiply-and-accumulate (0x0E) */
5374             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5375         } else {
5376             /* multiply-and-accumulate (0x0C) */
5377             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5378         }
5379 
5380         if (opc3 & 0x12) {
5381             /* Check overflow and/or saturate */
5382             TCGLabel *l1 = gen_new_label();
5383 
5384             if (opc3 & 0x10) {
5385                 /* Start with XER OV disabled, the most likely case */
5386                 tcg_gen_movi_tl(cpu_ov, 0);
5387             }
5388             if (opc3 & 0x01) {
5389                 /* Signed */
5390                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5391                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5392                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5393                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5394                 if (opc3 & 0x02) {
5395                     /* Saturate */
5396                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5397                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5398                 }
5399             } else {
5400                 /* Unsigned */
5401                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5402                 if (opc3 & 0x02) {
5403                     /* Saturate */
5404                     tcg_gen_movi_tl(t0, UINT32_MAX);
5405                 }
5406             }
5407             if (opc3 & 0x10) {
5408                 /* Overflow detected: set OV and SO */
5409                 tcg_gen_movi_tl(cpu_ov, 1);
5410                 tcg_gen_movi_tl(cpu_so, 1);
5411             }
5412             gen_set_label(l1);
5413             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5414         }
5415     } else {
5416         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5417     }
5418     if (unlikely(Rc != 0)) {
5419         /* Update Rc0 */
5420         gen_set_Rc0(ctx, cpu_gpr[rt]);
5421     }
5422 }
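/*
 * Summary of the decoding above (inferred from the code rather than
 * quoted from the 405 documentation): opc3 & 0x0D selects which 16-bit
 * halves of rA/rB are used and how they are extended; opc3 & 0x01 is
 * the signed form, opc3 & 0x02 saturates, opc3 & 0x10 updates
 * XER[OV/SO]; opc2 & 0x04 selects multiply-and-accumulate and
 * opc2 & 0x02 the negative (subtract) variant.
 */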
5423 
5424 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
5425 static void glue(gen_, name)(DisasContext *ctx)                               \
5426 {                                                                             \
5427     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
5428                          rD(ctx->opcode), Rc(ctx->opcode));                   \
5429 }
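/*
 * For illustration, GEN_MAC_HANDLER(macchw, 0x0C, 0x05) expands to
 * roughly:
 *
 *     static void gen_macchw(DisasContext *ctx)
 *     {
 *         gen_405_mulladd_insn(ctx, 0x0C, 0x05, rA(ctx->opcode),
 *                              rB(ctx->opcode), rD(ctx->opcode),
 *                              Rc(ctx->opcode));
 *     }
 */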
5430 
5431 /* macchw    - macchw.    */
5432 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5433 /* macchwo   - macchwo.   */
5434 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5435 /* macchws   - macchws.   */
5436 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5437 /* macchwso  - macchwso.  */
5438 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5439 /* macchwsu  - macchwsu.  */
5440 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5441 /* macchwsuo - macchwsuo. */
5442 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5443 /* macchwu   - macchwu.   */
5444 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5445 /* macchwuo  - macchwuo.  */
5446 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5447 /* machhw    - machhw.    */
5448 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5449 /* machhwo   - machhwo.   */
5450 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5451 /* machhws   - machhws.   */
5452 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5453 /* machhwso  - machhwso.  */
5454 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5455 /* machhwsu  - machhwsu.  */
5456 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5457 /* machhwsuo - machhwsuo. */
5458 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5459 /* machhwu   - machhwu.   */
5460 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5461 /* machhwuo  - machhwuo.  */
5462 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5463 /* maclhw    - maclhw.    */
5464 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5465 /* maclhwo   - maclhwo.   */
5466 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5467 /* maclhws   - maclhws.   */
5468 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5469 /* maclhwso  - maclhwso.  */
5470 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5471 /* maclhwu   - maclhwu.   */
5472 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5473 /* maclhwuo  - maclhwuo.  */
5474 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5475 /* maclhwsu  - maclhwsu.  */
5476 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5477 /* maclhwsuo - maclhwsuo. */
5478 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5479 /* nmacchw   - nmacchw.   */
5480 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5481 /* nmacchwo  - nmacchwo.  */
5482 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5483 /* nmacchws  - nmacchws.  */
5484 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5485 /* nmacchwso - nmacchwso. */
5486 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5487 /* nmachhw   - nmachhw.   */
5488 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5489 /* nmachhwo  - nmachhwo.  */
5490 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5491 /* nmachhws  - nmachhws.  */
5492 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5493 /* nmachhwso - nmachhwso. */
5494 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5495 /* nmaclhw   - nmaclhw.   */
5496 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5497 /* nmaclhwo  - nmaclhwo.  */
5498 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5499 /* nmaclhws  - nmaclhws.  */
5500 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5501 /* nmaclhwso - nmaclhwso. */
5502 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5503 
5504 /* mulchw  - mulchw.  */
5505 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5506 /* mulchwu - mulchwu. */
5507 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5508 /* mulhhw  - mulhhw.  */
5509 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5510 /* mulhhwu - mulhhwu. */
5511 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5512 /* mullhw  - mullhw.  */
5513 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5514 /* mullhwu - mullhwu. */
5515 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5516 
5517 /* mfdcr */
5518 static void gen_mfdcr(DisasContext *ctx)
5519 {
5520 #if defined(CONFIG_USER_ONLY)
5521     GEN_PRIV(ctx);
5522 #else
5523     TCGv dcrn;
5524 
5525     CHK_SV(ctx);
5526     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5527     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5528 #endif /* defined(CONFIG_USER_ONLY) */
5529 }
5530 
5531 /* mtdcr */
5532 static void gen_mtdcr(DisasContext *ctx)
5533 {
5534 #if defined(CONFIG_USER_ONLY)
5535     GEN_PRIV(ctx);
5536 #else
5537     TCGv dcrn;
5538 
5539     CHK_SV(ctx);
5540     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5541     gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5542 #endif /* defined(CONFIG_USER_ONLY) */
5543 }
5544 
5545 /* mfdcrx */
5546 /* XXX: not implemented on 440? */
5547 static void gen_mfdcrx(DisasContext *ctx)
5548 {
5549 #if defined(CONFIG_USER_ONLY)
5550     GEN_PRIV(ctx);
5551 #else
5552     CHK_SV(ctx);
5553     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5554                         cpu_gpr[rA(ctx->opcode)]);
5555     /* Note: setting the Rc bit leaves the state of Rc0 undefined */
5556 #endif /* defined(CONFIG_USER_ONLY) */
5557 }
5558 
5559 /* mtdcrx */
5560 /* XXX: not implemented on 440? */
5561 static void gen_mtdcrx(DisasContext *ctx)
5562 {
5563 #if defined(CONFIG_USER_ONLY)
5564     GEN_PRIV(ctx);
5565 #else
5566     CHK_SV(ctx);
5567     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5568                          cpu_gpr[rS(ctx->opcode)]);
5569     /* Note: setting the Rc bit leaves the state of Rc0 undefined */
5570 #endif /* defined(CONFIG_USER_ONLY) */
5571 }
5572 
5573 /* dccci */
5574 static void gen_dccci(DisasContext *ctx)
5575 {
5576     CHK_SV(ctx);
5577     /* interpreted as no-op */
5578 }
5579 
5580 /* dcread */
5581 static void gen_dcread(DisasContext *ctx)
5582 {
5583 #if defined(CONFIG_USER_ONLY)
5584     GEN_PRIV(ctx);
5585 #else
5586     TCGv EA, val;
5587 
5588     CHK_SV(ctx);
5589     gen_set_access_type(ctx, ACCESS_CACHE);
5590     EA = tcg_temp_new();
5591     gen_addr_reg_index(ctx, EA);
5592     val = tcg_temp_new();
5593     gen_qemu_ld32u(ctx, val, EA);
5594     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5595 #endif /* defined(CONFIG_USER_ONLY) */
5596 }
5597 
5598 /* icbt */
5599 static void gen_icbt_40x(DisasContext *ctx)
5600 {
5601     /*
5602      * interpreted as no-op
5603      * XXX: specification says this is treated as a load by the MMU but
5604      *      does not generate any exception
5605      */
5606 }
5607 
5608 /* iccci */
5609 static void gen_iccci(DisasContext *ctx)
5610 {
5611     CHK_SV(ctx);
5612     /* interpreted as no-op */
5613 }
5614 
5615 /* icread */
5616 static void gen_icread(DisasContext *ctx)
5617 {
5618     CHK_SV(ctx);
5619     /* interpreted as no-op */
5620 }
5621 
5622 /* rfci (supervisor only) */
5623 static void gen_rfci_40x(DisasContext *ctx)
5624 {
5625 #if defined(CONFIG_USER_ONLY)
5626     GEN_PRIV(ctx);
5627 #else
5628     CHK_SV(ctx);
5629     /* Restore CPU state */
5630     gen_helper_40x_rfci(cpu_env);
5631     ctx->base.is_jmp = DISAS_EXIT;
5632 #endif /* defined(CONFIG_USER_ONLY) */
5633 }
5634 
5635 static void gen_rfci(DisasContext *ctx)
5636 {
5637 #if defined(CONFIG_USER_ONLY)
5638     GEN_PRIV(ctx);
5639 #else
5640     CHK_SV(ctx);
5641     /* Restore CPU state */
5642     gen_helper_rfci(cpu_env);
5643     ctx->base.is_jmp = DISAS_EXIT;
5644 #endif /* defined(CONFIG_USER_ONLY) */
5645 }
5646 
5647 /* BookE specific */
5648 
5649 /* XXX: not implemented on 440? */
5650 static void gen_rfdi(DisasContext *ctx)
5651 {
5652 #if defined(CONFIG_USER_ONLY)
5653     GEN_PRIV(ctx);
5654 #else
5655     CHK_SV(ctx);
5656     /* Restore CPU state */
5657     gen_helper_rfdi(cpu_env);
5658     ctx->base.is_jmp = DISAS_EXIT;
5659 #endif /* defined(CONFIG_USER_ONLY) */
5660 }
5661 
5662 /* XXX: not implemented on 440? */
5663 static void gen_rfmci(DisasContext *ctx)
5664 {
5665 #if defined(CONFIG_USER_ONLY)
5666     GEN_PRIV(ctx);
5667 #else
5668     CHK_SV(ctx);
5669     /* Restore CPU state */
5670     gen_helper_rfmci(cpu_env);
5671     ctx->base.is_jmp = DISAS_EXIT;
5672 #endif /* defined(CONFIG_USER_ONLY) */
5673 }
5674 
5675 /* TLB management - PowerPC 405 implementation */
5676 
5677 /* tlbre */
5678 static void gen_tlbre_40x(DisasContext *ctx)
5679 {
5680 #if defined(CONFIG_USER_ONLY)
5681     GEN_PRIV(ctx);
5682 #else
5683     CHK_SV(ctx);
5684     switch (rB(ctx->opcode)) {
5685     case 0:
5686         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5687                                 cpu_gpr[rA(ctx->opcode)]);
5688         break;
5689     case 1:
5690         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5691                                 cpu_gpr[rA(ctx->opcode)]);
5692         break;
5693     default:
5694         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5695         break;
5696     }
5697 #endif /* defined(CONFIG_USER_ONLY) */
5698 }
5699 
5700 /* tlbsx - tlbsx. */
5701 static void gen_tlbsx_40x(DisasContext *ctx)
5702 {
5703 #if defined(CONFIG_USER_ONLY)
5704     GEN_PRIV(ctx);
5705 #else
5706     TCGv t0;
5707 
5708     CHK_SV(ctx);
5709     t0 = tcg_temp_new();
5710     gen_addr_reg_index(ctx, t0);
5711     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5712     if (Rc(ctx->opcode)) {
5713         TCGLabel *l1 = gen_new_label();
5714         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5715         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5716         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5717         gen_set_label(l1);
5718     }
5719 #endif /* defined(CONFIG_USER_ONLY) */
5720 }
5721 
5722 /* tlbwe */
5723 static void gen_tlbwe_40x(DisasContext *ctx)
5724 {
5725 #if defined(CONFIG_USER_ONLY)
5726     GEN_PRIV(ctx);
5727 #else
5728     CHK_SV(ctx);
5729 
5730     switch (rB(ctx->opcode)) {
5731     case 0:
5732         gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
5733                                 cpu_gpr[rS(ctx->opcode)]);
5734         break;
5735     case 1:
5736         gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
5737                                 cpu_gpr[rS(ctx->opcode)]);
5738         break;
5739     default:
5740         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5741         break;
5742     }
5743 #endif /* defined(CONFIG_USER_ONLY) */
5744 }
5745 
5746 /* TLB management - PowerPC 440 implementation */
5747 
5748 /* tlbre */
5749 static void gen_tlbre_440(DisasContext *ctx)
5750 {
5751 #if defined(CONFIG_USER_ONLY)
5752     GEN_PRIV(ctx);
5753 #else
5754     CHK_SV(ctx);
5755 
5756     switch (rB(ctx->opcode)) {
5757     case 0:
5758     case 1:
5759     case 2:
5760         {
5761             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5762             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
5763                                  t0, cpu_gpr[rA(ctx->opcode)]);
5764         }
5765         break;
5766     default:
5767         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5768         break;
5769     }
5770 #endif /* defined(CONFIG_USER_ONLY) */
5771 }
5772 
5773 /* tlbsx - tlbsx. */
5774 static void gen_tlbsx_440(DisasContext *ctx)
5775 {
5776 #if defined(CONFIG_USER_ONLY)
5777     GEN_PRIV(ctx);
5778 #else
5779     TCGv t0;
5780 
5781     CHK_SV(ctx);
5782     t0 = tcg_temp_new();
5783     gen_addr_reg_index(ctx, t0);
5784     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5785     if (Rc(ctx->opcode)) {
5786         TCGLabel *l1 = gen_new_label();
5787         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5788         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5789         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5790         gen_set_label(l1);
5791     }
5792 #endif /* defined(CONFIG_USER_ONLY) */
5793 }
5794 
5795 /* tlbwe */
5796 static void gen_tlbwe_440(DisasContext *ctx)
5797 {
5798 #if defined(CONFIG_USER_ONLY)
5799     GEN_PRIV(ctx);
5800 #else
5801     CHK_SV(ctx);
5802     switch (rB(ctx->opcode)) {
5803     case 0:
5804     case 1:
5805     case 2:
5806         {
5807             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5808             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
5809                                  cpu_gpr[rS(ctx->opcode)]);
5810         }
5811         break;
5812     default:
5813         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5814         break;
5815     }
5816 #endif /* defined(CONFIG_USER_ONLY) */
5817 }
5818 
5819 /* TLB management - PowerPC BookE 2.06 implementation */
5820 
5821 /* tlbre */
5822 static void gen_tlbre_booke206(DisasContext *ctx)
5823 {
5824 #if defined(CONFIG_USER_ONLY)
5825     GEN_PRIV(ctx);
5826 #else
5827     CHK_SV(ctx);
5828     gen_helper_booke206_tlbre(cpu_env);
5829 #endif /* defined(CONFIG_USER_ONLY) */
5830 }
5831 
5832 /* tlbsx - tlbsx. */
5833 static void gen_tlbsx_booke206(DisasContext *ctx)
5834 {
5835 #if defined(CONFIG_USER_ONLY)
5836     GEN_PRIV(ctx);
5837 #else
5838     TCGv t0;
5839 
5840     CHK_SV(ctx);
5841     if (rA(ctx->opcode)) {
5842         t0 = tcg_temp_new();
5843         tcg_gen_add_tl(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5844     } else {
5845         t0 = cpu_gpr[rB(ctx->opcode)];
5846     }
5847     gen_helper_booke206_tlbsx(cpu_env, t0);
5848 #endif /* defined(CONFIG_USER_ONLY) */
5849 }
5850 
5851 /* tlbwe */
5852 static void gen_tlbwe_booke206(DisasContext *ctx)
5853 {
5854 #if defined(CONFIG_USER_ONLY)
5855     GEN_PRIV(ctx);
5856 #else
5857     CHK_SV(ctx);
5858     gen_helper_booke206_tlbwe(cpu_env);
5859 #endif /* defined(CONFIG_USER_ONLY) */
5860 }
5861 
5862 static void gen_tlbivax_booke206(DisasContext *ctx)
5863 {
5864 #if defined(CONFIG_USER_ONLY)
5865     GEN_PRIV(ctx);
5866 #else
5867     TCGv t0;
5868 
5869     CHK_SV(ctx);
5870     t0 = tcg_temp_new();
5871     gen_addr_reg_index(ctx, t0);
5872     gen_helper_booke206_tlbivax(cpu_env, t0);
5873 #endif /* defined(CONFIG_USER_ONLY) */
5874 }
5875 
5876 static void gen_tlbilx_booke206(DisasContext *ctx)
5877 {
5878 #if defined(CONFIG_USER_ONLY)
5879     GEN_PRIV(ctx);
5880 #else
5881     TCGv t0;
5882 
5883     CHK_SV(ctx);
5884     t0 = tcg_temp_new();
5885     gen_addr_reg_index(ctx, t0);
5886 
5887     switch ((ctx->opcode >> 21) & 0x3) {
5888     case 0:
5889         gen_helper_booke206_tlbilx0(cpu_env, t0);
5890         break;
5891     case 1:
5892         gen_helper_booke206_tlbilx1(cpu_env, t0);
5893         break;
5894     case 3:
5895         gen_helper_booke206_tlbilx3(cpu_env, t0);
5896         break;
5897     default:
5898         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5899         break;
5900     }
5901 #endif /* defined(CONFIG_USER_ONLY) */
5902 }
5903 
5904 /* wrtee */
5905 static void gen_wrtee(DisasContext *ctx)
5906 {
5907 #if defined(CONFIG_USER_ONLY)
5908     GEN_PRIV(ctx);
5909 #else
5910     TCGv t0;
5911 
5912     CHK_SV(ctx);
5913     t0 = tcg_temp_new();
5914     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5915     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5916     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5917     gen_ppc_maybe_interrupt(ctx);
5918     /*
5919      * Stop translation to have a chance to raise an exception if we
5920      * just set msr_ee to 1
5921      */
5922     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5923 #endif /* defined(CONFIG_USER_ONLY) */
5924 }
5925 
5926 /* wrteei */
5927 static void gen_wrteei(DisasContext *ctx)
5928 {
5929 #if defined(CONFIG_USER_ONLY)
5930     GEN_PRIV(ctx);
5931 #else
5932     CHK_SV(ctx);
5933     if (ctx->opcode & 0x00008000) {
5934         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
5935         gen_ppc_maybe_interrupt(ctx);
5936         /* Stop translation to have a chance to raise an exception */
5937         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5938     } else {
5939         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5940     }
5941 #endif /* defined(CONFIG_USER_ONLY) */
5942 }
5943 
5944 /* PowerPC 440 specific instructions */
5945 
5946 /* dlmzb */
5947 static void gen_dlmzb(DisasContext *ctx)
5948 {
5949     TCGv_i32 t0 = tcg_constant_i32(Rc(ctx->opcode));
5950     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
5951                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
5952 }
5953 
5954 /* mbar replaces eieio on 440 */
5955 static void gen_mbar(DisasContext *ctx)
5956 {
5957     /* interpreted as no-op */
5958 }
5959 
5960 /* msync replaces sync on 440 */
5961 static void gen_msync_4xx(DisasContext *ctx)
5962 {
5963     /* Only e500 seems to treat reserved bits as invalid */
5964     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
5965         (ctx->opcode & 0x03FFF801)) {
5966         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5967     }
5968     /* otherwise interpreted as no-op */
5969 }
5970 
5971 /* icbt */
5972 static void gen_icbt_440(DisasContext *ctx)
5973 {
5974     /*
5975      * interpreted as no-op
5976      * XXX: specification says this is treated as a load by the MMU but
5977      *      does not generate any exception
5978      */
5979 }
5980 
5981 #if defined(TARGET_PPC64)
5982 static void gen_maddld(DisasContext *ctx)
5983 {
5984     TCGv_i64 t1 = tcg_temp_new_i64();
5985 
5986     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5987     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
5988 }
5989 
5990 /* maddhd maddhdu */
5991 static void gen_maddhd_maddhdu(DisasContext *ctx)
5992 {
5993     TCGv_i64 lo = tcg_temp_new_i64();
5994     TCGv_i64 hi = tcg_temp_new_i64();
5995     TCGv_i64 t1 = tcg_temp_new_i64();
5996 
5997     if (Rc(ctx->opcode)) {
5998         tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
5999                           cpu_gpr[rB(ctx->opcode)]);
6000         tcg_gen_movi_i64(t1, 0);
6001     } else {
6002         tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6003                           cpu_gpr[rB(ctx->opcode)]);
6004         tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6005     }
6006     tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6007                      cpu_gpr[rC(ctx->opcode)], t1);
6008 }
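/*
 * The add2 above forms the 128-bit sum {hi:lo} + {sign(rC):rC} (or
 * {0:rC} for the unsigned form) and keeps only the upper 64 bits in
 * rD, i.e. maddhd/maddhdu return the high half of rA * rB + rC.
 */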
6009 #endif /* defined(TARGET_PPC64) */
6010 
6011 static void gen_tbegin(DisasContext *ctx)
6012 {
6013     if (unlikely(!ctx->tm_enabled)) {
6014         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6015         return;
6016     }
6017     gen_helper_tbegin(cpu_env);
6018 }
6019 
6020 #define GEN_TM_NOOP(name)                                      \
6021 static inline void gen_##name(DisasContext *ctx)               \
6022 {                                                              \
6023     if (unlikely(!ctx->tm_enabled)) {                          \
6024         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6025         return;                                                \
6026     }                                                          \
6027     /*                                                         \
6028      * Because tbegin always fails in QEMU, these user         \
6029      * space instructions all have a simple implementation:    \
6030      *                                                         \
6031      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
6032      *           = 0b0 || 0b00    || 0b0                       \
6033      */                                                        \
6034     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6035 }
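/*
 * For illustration, GEN_TM_NOOP(tend) expands to a gen_tend() that
 * raises the facility-unavailable exception when TM is disabled and
 * otherwise just clears CR[0], reflecting the always-failed
 * transaction state.
 */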
6036 
6037 GEN_TM_NOOP(tend);
6038 GEN_TM_NOOP(tabort);
6039 GEN_TM_NOOP(tabortwc);
6040 GEN_TM_NOOP(tabortwci);
6041 GEN_TM_NOOP(tabortdc);
6042 GEN_TM_NOOP(tabortdci);
6043 GEN_TM_NOOP(tsr);
6044 
6045 static inline void gen_cp_abort(DisasContext *ctx)
6046 {
6047     /* Do Nothing */
6048 }
6049 
6050 #define GEN_CP_PASTE_NOOP(name)                           \
6051 static inline void gen_##name(DisasContext *ctx)          \
6052 {                                                         \
6053     /*                                                    \
6054      * Generate invalid exception until we have an        \
6055      * implementation of the copy paste facility          \
6056      */                                                   \
6057     gen_invalid(ctx);                                     \
6058 }
6059 
6060 GEN_CP_PASTE_NOOP(copy)
6061 GEN_CP_PASTE_NOOP(paste)
6062 
6063 static void gen_tcheck(DisasContext *ctx)
6064 {
6065     if (unlikely(!ctx->tm_enabled)) {
6066         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6067         return;
6068     }
6069     /*
6070      * Because tbegin always fails, the tcheck implementation is
6071      * simple:
6072      *
6073      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6074      *         = 0b1 || 0b00 || 0b0
6075      */
6076     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6077 }
6078 
6079 #if defined(CONFIG_USER_ONLY)
6080 #define GEN_TM_PRIV_NOOP(name)                                 \
6081 static inline void gen_##name(DisasContext *ctx)               \
6082 {                                                              \
6083     gen_priv_opc(ctx);                                         \
6084 }
6085 
6086 #else
6087 
6088 #define GEN_TM_PRIV_NOOP(name)                                 \
6089 static inline void gen_##name(DisasContext *ctx)               \
6090 {                                                              \
6091     CHK_SV(ctx);                                               \
6092     if (unlikely(!ctx->tm_enabled)) {                          \
6093         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6094         return;                                                \
6095     }                                                          \
6096     /*                                                         \
6097      * Because tbegin always fails, the implementation is      \
6098      * simple:                                                 \
6099      *                                                         \
6100      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
6101      *         = 0b0 || 0b00 || 0b0                            \
6102      */                                                        \
6103     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6104 }
6105 
6106 #endif
6107 
6108 GEN_TM_PRIV_NOOP(treclaim);
6109 GEN_TM_PRIV_NOOP(trechkpt);
6110 
6111 static inline void get_fpr(TCGv_i64 dst, int regno)
6112 {
6113     tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
6114 }
6115 
6116 static inline void set_fpr(int regno, TCGv_i64 src)
6117 {
6118     tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
6119     /*
6120      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
6121      * corresponding to the target FPR was undefined. However,
6122      * most (if not all) real hardware was setting the result to 0.
6123      * Starting at ISA v3.1, the result for doubleword 1 is now defined
6124      * to be 0.
6125      */
6126     tcg_gen_st_i64(tcg_constant_i64(0), cpu_env, vsr64_offset(regno, false));
6127 }
6128 
6129 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
6130 {
6131     tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
6132 }
6133 
6134 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
6135 {
6136     tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
6137 }
6138 
6139 /*
6140  * Helpers for decodetree used by !function for decoding arguments.
6141  */
6142 static int times_2(DisasContext *ctx, int x)
6143 {
6144     return x * 2;
6145 }
6146 
6147 static int times_4(DisasContext *ctx, int x)
6148 {
6149     return x * 4;
6150 }
6151 
6152 static int times_16(DisasContext *ctx, int x)
6153 {
6154     return x * 16;
6155 }
6156 
6157 static int64_t dw_compose_ea(DisasContext *ctx, int x)
6158 {
6159     return deposit64(0xfffffffffffffe00, 3, 6, x);
6160 }
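/*
 * dw_compose_ea() deposits the 6-bit field x at bit 3 of the fixed
 * pattern 0xfffffffffffffe00, whose low nine bits are zero; e.g.
 * x = 5 gives 0xfffffffffffffe28 (5 << 3 ORed into the constant).
 */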
6161 
6162 /*
6163  * Helpers for trans_* functions to check for specific insns flags.
6164  * Use token pasting to ensure that we use the proper flag with the
6165  * proper variable.
6166  */
6167 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
6168     do {                                                \
6169         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
6170             return false;                               \
6171         }                                               \
6172     } while (0)
6173 
6174 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
6175     do {                                                \
6176         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
6177             return false;                               \
6178         }                                               \
6179     } while (0)
6180 
6181 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
6182 #if TARGET_LONG_BITS == 32
6183 # define REQUIRE_64BIT(CTX)  return false
6184 #else
6185 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
6186 #endif
6187 
6188 #define REQUIRE_VECTOR(CTX)                             \
6189     do {                                                \
6190         if (unlikely(!(CTX)->altivec_enabled)) {        \
6191             gen_exception((CTX), POWERPC_EXCP_VPU);     \
6192             return true;                                \
6193         }                                               \
6194     } while (0)
6195 
6196 #define REQUIRE_VSX(CTX)                                \
6197     do {                                                \
6198         if (unlikely(!(CTX)->vsx_enabled)) {            \
6199             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
6200             return true;                                \
6201         }                                               \
6202     } while (0)
6203 
6204 #define REQUIRE_FPU(ctx)                                \
6205     do {                                                \
6206         if (unlikely(!(ctx)->fpu_enabled)) {            \
6207             gen_exception((ctx), POWERPC_EXCP_FPU);     \
6208             return true;                                \
6209         }                                               \
6210     } while (0)
6211 
6212 #if !defined(CONFIG_USER_ONLY)
6213 #define REQUIRE_SV(CTX)             \
6214     do {                            \
6215         if (unlikely((CTX)->pr)) {  \
6216             gen_priv_opc(CTX);      \
6217             return true;            \
6218         }                           \
6219     } while (0)
6220 
6221 #define REQUIRE_HV(CTX)                             \
6222     do {                                            \
6223         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
6224             gen_priv_opc(CTX);                      \
6225             return true;                            \
6226         }                                           \
6227     } while (0)
6228 #else
6229 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6230 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6231 #endif
6232 
6233 /*
6234  * Helpers for implementing sets of trans_* functions.
6235  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6236  */
6237 #define TRANS(NAME, FUNC, ...) \
6238     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6239     { return FUNC(ctx, a, __VA_ARGS__); }
6240 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
6241     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6242     {                                                          \
6243         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
6244         return FUNC(ctx, a, __VA_ARGS__);                      \
6245     }
6246 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6247     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6248     {                                                          \
6249         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6250         return FUNC(ctx, a, __VA_ARGS__);                      \
6251     }
6252 
6253 #define TRANS64(NAME, FUNC, ...) \
6254     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6255     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
6256 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6257     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6258     {                                                          \
6259         REQUIRE_64BIT(ctx);                                    \
6260         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6261         return FUNC(ctx, a, __VA_ARGS__);                      \
6262     }
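/*
 * For illustration (FOO and do_foo are placeholders, not insns from
 * this file), TRANS_FLAGS(INTEGER, FOO, do_foo, arg) expands to a
 * trans_FOO() that returns false unless PPC_INTEGER is present in
 * insns_flags and otherwise tail-calls do_foo(ctx, a, arg).
 */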
6263 
6264 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6265 
6266 
6267 #include "decode-insn32.c.inc"
6268 #include "decode-insn64.c.inc"
6269 #include "power8-pmu-regs.c.inc"
6270 
6271 /*
6272  * Incorporate CIA into the constant when R=1.
6273  * Validate that when R=1, RA=0.
6274  */
6275 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6276 {
6277     d->rt = a->rt;
6278     d->ra = a->ra;
6279     d->si = a->si;
6280     if (a->r) {
6281         if (unlikely(a->ra != 0)) {
6282             gen_invalid(ctx);
6283             return false;
6284         }
6285         d->si += ctx->cia;
6286     }
6287     return true;
6288 }
6289 
6290 #include "translate/fixedpoint-impl.c.inc"
6291 
6292 #include "translate/fp-impl.c.inc"
6293 
6294 #include "translate/vmx-impl.c.inc"
6295 
6296 #include "translate/vsx-impl.c.inc"
6297 
6298 #include "translate/dfp-impl.c.inc"
6299 
6300 #include "translate/spe-impl.c.inc"
6301 
6302 #include "translate/branch-impl.c.inc"
6303 
6304 #include "translate/processor-ctrl-impl.c.inc"
6305 
6306 #include "translate/storage-ctrl-impl.c.inc"
6307 
6308 /* Handles lfdp */
6309 static void gen_dform39(DisasContext *ctx)
6310 {
6311     if ((ctx->opcode & 0x3) == 0) {
6312         if (ctx->insns_flags2 & PPC2_ISA205) {
6313             return gen_lfdp(ctx);
6314         }
6315     }
6316     return gen_invalid(ctx);
6317 }
6318 
6319 /* Handles stfdp */
6320 static void gen_dform3D(DisasContext *ctx)
6321 {
6322     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6323         /* stfdp */
6324         if (ctx->insns_flags2 & PPC2_ISA205) {
6325             return gen_stfdp(ctx);
6326         }
6327     }
6328     return gen_invalid(ctx);
6329 }
6330 
6331 #if defined(TARGET_PPC64)
6332 /* brd */
6333 static void gen_brd(DisasContext *ctx)
6334 {
6335     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6336 }
6337 
6338 /* brw */
6339 static void gen_brw(DisasContext *ctx)
6340 {
6341     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6342     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6343 
6344 }
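/*
 * brw above byte-reverses each 32-bit word in place: bswap64 reverses
 * the whole doubleword and the 32-bit rotate restores the word order,
 * e.g. 0x0123456789abcdef becomes 0x67452301efcdab89.
 */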
6345 
6346 /* brh */
6347 static void gen_brh(DisasContext *ctx)
6348 {
6349     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6350     TCGv_i64 t1 = tcg_temp_new_i64();
6351     TCGv_i64 t2 = tcg_temp_new_i64();
6352 
6353     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6354     tcg_gen_and_i64(t2, t1, mask);
6355     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6356     tcg_gen_shli_i64(t1, t1, 8);
6357     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6358 }
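/*
 * brh above swaps the two bytes of every halfword: the masked shifts
 * exchange the odd and even byte lanes, e.g. 0x0123456789abcdef
 * becomes 0x23016745ab89efcd.
 */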
6359 #endif
6360 
6361 static opcode_t opcodes[] = {
6362 #if defined(TARGET_PPC64)
6363 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6364 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6365 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6366 #endif
6367 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6368 #if defined(TARGET_PPC64)
6369 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6370 #endif
6371 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6372 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6373 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6374 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6375 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6376 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6377 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6378 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6379 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6380 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6381 #if defined(TARGET_PPC64)
6382 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6383 #endif
6384 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6385 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6386 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6387 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6388 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6389 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6390 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6391 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6392 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6393 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6394 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6395 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6396 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6397 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6398 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6399 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6400 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6401 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6402 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6403 #if defined(TARGET_PPC64)
6404 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6405 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6406 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6407 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6408 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6409 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6410 #endif
6411 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6412 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6413 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6414 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6415 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6416 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6417 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6418 #if defined(TARGET_PPC64)
6419 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6420 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6421 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6422 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6423 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6424 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6425                PPC_NONE, PPC2_ISA300),
6426 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6427                PPC_NONE, PPC2_ISA300),
6428 #endif
6429 /* handles lfdp, lxsd, lxssp */
6430 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6431 /* handles stfdp, stxsd, stxssp */
6432 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6433 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6434 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6435 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6436 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6437 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6438 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6439 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6440 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6441 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6442 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6443 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6444 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6445 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6446 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6447 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6448 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6449 #if defined(TARGET_PPC64)
6450 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6451 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6452 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6453 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6454 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6455 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6456 #endif
6457 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6458 /* ISA v3.0 changed the extended opcode from 62 to 30 */
6459 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
6460 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
6461 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6462 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6463 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6464 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6465 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6466 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6467 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6468 #if defined(TARGET_PPC64)
6469 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6470 #if !defined(CONFIG_USER_ONLY)
6471 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6472 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6473 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6474 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6475 #endif
6476 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6477 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6478 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6479 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6480 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6481 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6482 #endif
6483 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6484 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6485 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
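/*
 * Illustration only (not a table entry): per the note above, the decoder's
 * opc2 field overlaps the low bit of sc's LEV operand, so the mnemonic is
 * registered twice and both encodings reach the same translator:
 *
 *   LEV low bit 1  ->  opc2 = 0x11  ->  first  GEN_HANDLER(sc, ...) above
 *   LEV low bit 0  ->  opc2 = 0x01  ->  second GEN_HANDLER(sc, ...) above
 *
 * scv uses the same trick above with opc2 = 0x10 / 0x00.
 */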
6486 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6487 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6488 #if defined(TARGET_PPC64)
6489 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6490 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6491 #endif
6492 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6493 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6494 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6495 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6496 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6497 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6498 #if defined(TARGET_PPC64)
6499 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6500 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6501 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6502 #endif
6503 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6504 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6505 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6506 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6507 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6508 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6509 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6510 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6511 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6512 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6513 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6514 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6515 GEN_HANDLER_E(dcblc, 0x1F, 0x06, 0x0c, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6516 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6517 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6518 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6519 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6520 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6521 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6522 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6523 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6524 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6525 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6526 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6527 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6528 #if defined(TARGET_PPC64)
6529 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6530 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6531              PPC_SEGMENT_64B),
6532 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6533 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6534              PPC_SEGMENT_64B),
6535 #endif
6536 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6537 /*
6538  * XXX Those instructions will need to be handled differently for
6539  * different ISA versions
6540  */
6541 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6542 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6543 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6544 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6545 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6546 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6547 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6548 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6549 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6550 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6551 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6552 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6553 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6554 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6555 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6556 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6557 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6558 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6559 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6560 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6561 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6562 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6563 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6564 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6565 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6566 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6567 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6568                PPC_NONE, PPC2_BOOKE206),
6569 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6570                PPC_NONE, PPC2_BOOKE206),
6571 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6572                PPC_NONE, PPC2_BOOKE206),
6573 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6574                PPC_NONE, PPC2_BOOKE206),
6575 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6576                PPC_NONE, PPC2_BOOKE206),
6577 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6578 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6579 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6580 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6581               PPC_BOOKE, PPC2_BOOKE206),
6582 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6583 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6584                PPC_BOOKE, PPC2_BOOKE206),
6585 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6586              PPC_440_SPEC),
6587 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6588 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6589 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6590 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6591 #if defined(TARGET_PPC64)
6592 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6593               PPC2_ISA300),
6594 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6595 #endif
6596 
6597 #undef GEN_INT_ARITH_ADD
6598 #undef GEN_INT_ARITH_ADD_CONST
6599 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6600 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6601 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6602                                 add_ca, compute_ca, compute_ov)               \
6603 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6604 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6605 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6606 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6607 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6608 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6609 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6610 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6611 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6612 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
6613 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6614 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
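/*
 * Expansion sketch (illustration only): after the #undef/#define pair above,
 * each GEN_INT_ARITH_ADD* invocation emits an opcode-table entry under
 * opc1 0x1F / opc2 0x0A, e.g.
 *
 *   GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
 *     -> GEN_HANDLER(add, 0x1F, 0x0A, 0x08, 0x00000000, PPC_INTEGER),
 *   GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
 *     -> GEN_HANDLER(addme, 0x1F, 0x0A, 0x07, 0x0000F800, PPC_INTEGER),
 */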
6615 
6616 #undef GEN_INT_ARITH_DIVW
6617 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
6618 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6619 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6620 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6621 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6622 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6623 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6624 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6625 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6626 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6627 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6628 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6629 
6630 #if defined(TARGET_PPC64)
6631 #undef GEN_INT_ARITH_DIVD
6632 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
6633 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6634 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6635 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6636 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6637 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6638 
6639 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6640 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6641 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6642 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6643 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6644 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6645 
6646 #undef GEN_INT_ARITH_MUL_HELPER
6647 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
6648 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6649 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6650 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6651 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6652 #endif
6653 
6654 #undef GEN_INT_ARITH_SUBF
6655 #undef GEN_INT_ARITH_SUBF_CONST
6656 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
6657 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6658 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
6659                                 add_ca, compute_ca, compute_ov)               \
6660 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6661 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6662 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6663 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6664 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6665 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6666 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6667 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6668 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6669 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6670 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6671 
6672 #undef GEN_LOGICAL1
6673 #undef GEN_LOGICAL2
6674 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
6675 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6676 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
6677 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6678 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6679 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6680 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
6681 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
6682 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
6683 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
6684 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
6685 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
6686 #if defined(TARGET_PPC64)
6687 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
6688 #endif
6689 
6690 #if defined(TARGET_PPC64)
6691 #undef GEN_PPC64_R2
6692 #undef GEN_PPC64_R4
6693 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
6694 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6695 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6696              PPC_64B)
6697 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
6698 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6699 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
6700              PPC_64B),                                                        \
6701 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6702              PPC_64B),                                                        \
6703 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
6704              PPC_64B)
6705 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
6706 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
6707 GEN_PPC64_R4(rldic, 0x1E, 0x04),
6708 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
6709 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
6710 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
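/*
 * Expansion sketch (illustration only): GEN_PPC64_R4 registers the same
 * mnemonic under four adjacent opc2 values (opc2, | 0x01, | 0x10, | 0x11).
 * For instance, GEN_PPC64_R4(rldicl, 0x1E, 0x00) above expands to:
 *
 *   GEN_HANDLER2(rldicl0, "rldicl", 0x1E, 0x00, 0xFF, 0x00000000, PPC_64B),
 *   GEN_HANDLER2(rldicl1, "rldicl", 0x1E, 0x01, 0xFF, 0x00000000, PPC_64B),
 *   GEN_HANDLER2(rldicl2, "rldicl", 0x1E, 0x10, 0xFF, 0x00000000, PPC_64B),
 *   GEN_HANDLER2(rldicl3, "rldicl", 0x1E, 0x11, 0xFF, 0x00000000, PPC_64B)
 *
 * GEN_PPC64_R2 does the same with only the opc2 | 0x10 variant added.
 */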
6711 #endif
6712 
6713 #undef GEN_LDX_E
6714 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
6715 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6716 
6717 #if defined(TARGET_PPC64)
6718 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
6719 
6720 /* HV/P7 and later only */
6721 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
6722 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
6723 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
6724 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
6725 #endif
6726 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
6727 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6728 
6729 /* External PID-based load */
6730 #undef GEN_LDEPX
6731 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
6732 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6733               0x00000001, PPC_NONE, PPC2_BOOKE206),
6734 
6735 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
6736 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
6737 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
6738 #if defined(TARGET_PPC64)
6739 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
6740 #endif
6741 
6742 #undef GEN_STX_E
6743 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
6744 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
6745 
6746 #if defined(TARGET_PPC64)
6747 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6748 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6749 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6750 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6751 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6752 #endif
6753 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6754 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6755 
6756 #undef GEN_STEPX
6757 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
6758 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6759               0x00000001, PPC_NONE, PPC2_BOOKE206),
6760 
6761 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
6762 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
6763 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
6764 #if defined(TARGET_PPC64)
6765 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
6766 #endif
6767 
6768 #undef GEN_CRLOGIC
6769 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
6770 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6771 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6772 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6773 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6774 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6775 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6776 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6777 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6778 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6779 
6780 #undef GEN_MAC_HANDLER
6781 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
6782 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6783 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6784 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6785 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6786 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6787 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6788 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6789 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6790 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6791 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6792 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6793 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6794 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6795 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6796 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6797 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6798 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6799 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6800 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6801 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6802 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6803 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6804 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6805 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6806 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6807 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6808 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6809 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6810 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6811 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6812 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6813 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6814 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6815 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6816 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6817 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6818 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6819 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6820 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6821 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6822 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6823 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6824 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6825 
6826 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6827                PPC_NONE, PPC2_TM),
6828 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
6829                PPC_NONE, PPC2_TM),
6830 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6831                PPC_NONE, PPC2_TM),
6832 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6833                PPC_NONE, PPC2_TM),
6834 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6835                PPC_NONE, PPC2_TM),
6836 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6837                PPC_NONE, PPC2_TM),
6838 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6839                PPC_NONE, PPC2_TM),
6840 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6841                PPC_NONE, PPC2_TM),
6842 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6843                PPC_NONE, PPC2_TM),
6844 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6845                PPC_NONE, PPC2_TM),
6846 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6847                PPC_NONE, PPC2_TM),
6848 
6849 #include "translate/fp-ops.c.inc"
6850 
6851 #include "translate/vmx-ops.c.inc"
6852 
6853 #include "translate/vsx-ops.c.inc"
6854 
6855 #include "translate/spe-ops.c.inc"
6856 };
6857 
6858 /*****************************************************************************/
6859 /* Opcode types */
6860 enum {
6861     PPC_DIRECT   = 0, /* Opcode routine        */
6862     PPC_INDIRECT = 1, /* Indirect opcode table */
6863 };
6864 
6865 #define PPC_OPCODE_MASK 0x3
6866 
6867 static inline int is_indirect_opcode(void *handler)
6868 {
6869     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
6870 }
6871 
6872 static inline opc_handler_t **ind_table(void *handler)
6873 {
6874     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
6875 }
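/*
 * Tagging sketch (illustration only; "slot" and "sub" are hypothetical
 * variables): create_new_table() below stores sub-table pointers with
 * PPC_INDIRECT ORed into the otherwise clear low bits, which is what the
 * two helpers above decode:
 *
 *   opc_handler_t **sub = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
 *   opc_handler_t *slot = (opc_handler_t *)((uintptr_t)sub | PPC_INDIRECT);
 *
 *   is_indirect_opcode(slot);  // true: (slot & PPC_OPCODE_MASK) == PPC_INDIRECT
 *   ind_table(slot);           // recovers "sub" by masking off PPC_OPCODE_MASK
 *
 * A direct handler pointer keeps its low bits clear, so it is never mistaken
 * for a sub-table.
 */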
6876 
6877 /* Instruction (opcode) table creation */
6879 static void fill_new_table(opc_handler_t **table, int len)
6880 {
6881     int i;
6882 
6883     for (i = 0; i < len; i++) {
6884         table[i] = &invalid_handler;
6885     }
6886 }
6887 
6888 static int create_new_table(opc_handler_t **table, unsigned char idx)
6889 {
6890     opc_handler_t **tmp;
6891 
6892     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
6893     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
6894     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
6895 
6896     return 0;
6897 }
6898 
6899 static int insert_in_table(opc_handler_t **table, unsigned char idx,
6900                             opc_handler_t *handler)
6901 {
6902     if (table[idx] != &invalid_handler) {
6903         return -1;
6904     }
6905     table[idx] = handler;
6906 
6907     return 0;
6908 }
6909 
6910 static int register_direct_insn(opc_handler_t **ppc_opcodes,
6911                                 unsigned char idx, opc_handler_t *handler)
6912 {
6913     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
6914         printf("*** ERROR: opcode %02x already assigned in main "
6915                "opcode table\n", idx);
6916         return -1;
6917     }
6918 
6919     return 0;
6920 }
6921 
6922 static int register_ind_in_table(opc_handler_t **table,
6923                                  unsigned char idx1, unsigned char idx2,
6924                                  opc_handler_t *handler)
6925 {
6926     if (table[idx1] == &invalid_handler) {
6927         if (create_new_table(table, idx1) < 0) {
6928             printf("*** ERROR: unable to create indirect table "
6929                    "idx=%02x\n", idx1);
6930             return -1;
6931         }
6932     } else {
6933         if (!is_indirect_opcode(table[idx1])) {
6934             printf("*** ERROR: idx %02x already assigned to a direct "
6935                    "opcode\n", idx1);
6936             return -1;
6937         }
6938     }
6939     if (handler != NULL &&
6940         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
6941         printf("*** ERROR: opcode %02x already assigned in "
6942                "opcode table %02x\n", idx2, idx1);
6943         return -1;
6944     }
6945 
6946     return 0;
6947 }
6948 
6949 static int register_ind_insn(opc_handler_t **ppc_opcodes,
6950                              unsigned char idx1, unsigned char idx2,
6951                              opc_handler_t *handler)
6952 {
6953     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
6954 }
6955 
6956 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
6957                                 unsigned char idx1, unsigned char idx2,
6958                                 unsigned char idx3, opc_handler_t *handler)
6959 {
6960     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6961         printf("*** ERROR: unable to join indirect table idx "
6962                "[%02x-%02x]\n", idx1, idx2);
6963         return -1;
6964     }
6965     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
6966                               handler) < 0) {
6967         printf("*** ERROR: unable to insert opcode "
6968                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6969         return -1;
6970     }
6971 
6972     return 0;
6973 }
6974 
6975 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
6976                                  unsigned char idx1, unsigned char idx2,
6977                                  unsigned char idx3, unsigned char idx4,
6978                                  opc_handler_t *handler)
6979 {
6980     opc_handler_t **table;
6981 
6982     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6983         printf("*** ERROR: unable to join indirect table idx "
6984                "[%02x-%02x]\n", idx1, idx2);
6985         return -1;
6986     }
6987     table = ind_table(ppc_opcodes[idx1]);
6988     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
6989         printf("*** ERROR: unable to join 2nd-level indirect table idx "
6990                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6991         return -1;
6992     }
6993     table = ind_table(table[idx2]);
6994     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
6995         printf("*** ERROR: unable to insert opcode "
6996                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
6997         return -1;
6998     }
6999     return 0;
7000 }
7001 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7002 {
7003     if (insn->opc2 != 0xFF) {
7004         if (insn->opc3 != 0xFF) {
7005             if (insn->opc4 != 0xFF) {
7006                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7007                                           insn->opc3, insn->opc4,
7008                                           &insn->handler) < 0) {
7009                     return -1;
7010                 }
7011             } else {
7012                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7013                                          insn->opc3, &insn->handler) < 0) {
7014                     return -1;
7015                 }
7016             }
7017         } else {
7018             if (register_ind_insn(ppc_opcodes, insn->opc1,
7019                                   insn->opc2, &insn->handler) < 0) {
7020                 return -1;
7021             }
7022         }
7023     } else {
7024         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7025             return -1;
7026         }
7027     }
7028 
7029     return 0;
7030 }
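/*
 * Worked example (illustration only, values taken from the table above and
 * assuming plain GEN_HANDLER leaves opc4 at 0xFF): "ori" has
 * opc2 == opc3 == 0xFF, so register_insn() takes the direct path and the
 * handler is stored straight into ppc_opcodes[0x18].  "add" (opc1 0x1F,
 * opc2 0x0A, opc3 0x08) takes the register_dblind_insn() path and ends up
 * at ind_table(ind_table(ppc_opcodes[0x1F])[0x0A])[0x08], which is exactly
 * where decode_legacy() below looks for it.
 */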
7031 
7032 static int test_opcode_table(opc_handler_t **table, int len)
7033 {
7034     int i, count, tmp;
7035 
7036     for (i = 0, count = 0; i < len; i++) {
7037         /* Consistency fixup */
7038         if (table[i] == NULL) {
7039             table[i] = &invalid_handler;
7040         }
7041         if (table[i] != &invalid_handler) {
7042             if (is_indirect_opcode(table[i])) {
7043                 tmp = test_opcode_table(ind_table(table[i]),
7044                     PPC_CPU_INDIRECT_OPCODES_LEN);
7045                 if (tmp == 0) {
7046                     g_free(ind_table(table[i]));
7047                     table[i] = &invalid_handler;
7048                 } else {
7049                     count++;
7050                 }
7051             } else {
7052                 count++;
7053             }
7054         }
7055     }
7056 
7057     return count;
7058 }
7059 
7060 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7061 {
7062     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7063         printf("*** WARNING: no opcode defined!\n");
7064     }
7065 }
7066 
7067 /*****************************************************************************/
7068 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7069 {
7070     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7071     opcode_t *opc;
7072 
7073     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7074     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7075         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7076             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7077             if (register_insn(cpu->opcodes, opc) < 0) {
7078                 error_setg(errp, "ERROR initializing PowerPC instruction "
7079                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7080                            opc->opc3);
7081                 return;
7082             }
7083         }
7084     }
7085     fix_opcode_tables(cpu->opcodes);
7086     fflush(stdout);
7087     fflush(stderr);
7088 }
7089 
7090 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7091 {
7092     opc_handler_t **table, **table_2;
7093     int i, j, k;
7094 
7095     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7096         if (cpu->opcodes[i] == &invalid_handler) {
7097             continue;
7098         }
7099         if (is_indirect_opcode(cpu->opcodes[i])) {
7100             table = ind_table(cpu->opcodes[i]);
7101             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7102                 if (table[j] == &invalid_handler) {
7103                     continue;
7104                 }
7105                 if (is_indirect_opcode(table[j])) {
7106                     table_2 = ind_table(table[j]);
7107                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7108                         if (table_2[k] != &invalid_handler &&
7109                             is_indirect_opcode(table_2[k])) {
7110                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7111                                                      ~PPC_INDIRECT));
7112                         }
7113                     }
7114                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7115                                              ~PPC_INDIRECT));
7116                 }
7117             }
7118             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7119                 ~PPC_INDIRECT));
7120         }
7121     }
7122 }
7123 
7124 int ppc_fixup_cpu(PowerPCCPU *cpu)
7125 {
7126     CPUPPCState *env = &cpu->env;
7127 
7128     /*
7129      * TCG doesn't (yet) emulate some groups of instructions that are
7130      * implemented on some otherwise supported CPUs (e.g. VSX and
7131      * decimal floating point instructions on POWER7).  We remove
7132      * unsupported instruction groups from the cpu state's instruction
7133      * masks and hope the guest can cope.  For at least the pseries
7134      * machine, the unavailability of these instructions can be
7135      * advertised to the guest via the device tree.
7136      */
7137     if ((env->insns_flags & ~PPC_TCG_INSNS)
7138         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7139         warn_report("Disabling some instructions which are not "
7140                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7141                     env->insns_flags & ~PPC_TCG_INSNS,
7142                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7143     }
7144     env->insns_flags &= PPC_TCG_INSNS;
7145     env->insns_flags2 &= PPC_TCG_INSNS2;
7146     return 0;
7147 }
7148 
7149 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
7150 {
7151     opc_handler_t **table, *handler;
7152     uint32_t inval;
7153 
7154     ctx->opcode = insn;
7155 
7156     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7157               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7158               ctx->le_mode ? "little" : "big");
7159 
7160     table = cpu->opcodes;
7161     handler = table[opc1(insn)];
7162     if (is_indirect_opcode(handler)) {
7163         table = ind_table(handler);
7164         handler = table[opc2(insn)];
7165         if (is_indirect_opcode(handler)) {
7166             table = ind_table(handler);
7167             handler = table[opc3(insn)];
7168             if (is_indirect_opcode(handler)) {
7169                 table = ind_table(handler);
7170                 handler = table[opc4(insn)];
7171             }
7172         }
7173     }
7174 
7175     /* Is the opcode *really* valid? */
7176     if (unlikely(handler->handler == &gen_invalid)) {
7177         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7178                       "%02x - %02x - %02x - %02x (%08x) "
7179                       TARGET_FMT_lx "\n",
7180                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7181                       insn, ctx->cia);
7182         return false;
7183     }
7184 
7185     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7186                  && Rc(insn))) {
7187         inval = handler->inval2;
7188     } else {
7189         inval = handler->inval1;
7190     }
7191 
7192     if (unlikely((insn & inval) != 0)) {
7193         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7194                       "%02x - %02x - %02x - %02x (%08x) "
7195                       TARGET_FMT_lx "\n", insn & inval,
7196                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7197                       insn, ctx->cia);
7198         return false;
7199     }
7200 
7201     handler->handler(ctx);
7202     return true;
7203 }
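/*
 * Lookup sketch (illustration only, continuing the "add" example): with
 * opc1(insn) == 0x1F the first slot is indirect, opc2(insn) == 0x0A selects
 * the second-level table, and opc3(insn) == 0x08 yields the direct handler;
 * opc4 is only consulted when the third level is itself indirect.  The
 * inval check above then rejects encodings with reserved bits set, using
 * inval2 instead of inval1 for SPE instructions with Rc set.
 */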
7204 
7205 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
7206 {
7207     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7208     CPUPPCState *env = cs->env_ptr;
7209     uint32_t hflags = ctx->base.tb->flags;
7210 
7211     ctx->spr_cb = env->spr_cb;
7212     ctx->pr = (hflags >> HFLAGS_PR) & 1;
7213     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
7214     ctx->dr = (hflags >> HFLAGS_DR) & 1;
7215     ctx->hv = (hflags >> HFLAGS_HV) & 1;
7216     ctx->insns_flags = env->insns_flags;
7217     ctx->insns_flags2 = env->insns_flags2;
7218     ctx->access_type = -1;
7219     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
7220     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
7221     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
7222     ctx->flags = env->flags;
7223 #if defined(TARGET_PPC64)
7224     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
7225     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7226 #endif
7227     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
7228         || env->mmu_model & POWERPC_MMU_64;
7229 
7230     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
7231     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
7232     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
7233     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
7234     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
7235     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
7236     ctx->hr = (hflags >> HFLAGS_HR) & 1;
7237     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
7238     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
7239     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
7240     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
7241     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
7242 
7243     ctx->singlestep_enabled = 0;
7244     if ((hflags >> HFLAGS_SE) & 1) {
7245         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
7246         ctx->base.max_insns = 1;
7247     }
7248     if ((hflags >> HFLAGS_BE) & 1) {
7249         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
7250     }
7251 }
7252 
7253 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
7254 {
7255 }
7256 
7257 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
7258 {
7259     tcg_gen_insn_start(dcbase->pc_next);
7260 }
7261 
7262 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
7263 {
7264     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
7265     return opc1(insn) == 1;
7266 }
7267 
7268 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
7269 {
7270     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7271     PowerPCCPU *cpu = POWERPC_CPU(cs);
7272     CPUPPCState *env = cs->env_ptr;
7273     target_ulong pc;
7274     uint32_t insn;
7275     bool ok;
7276 
7277     LOG_DISAS("----------------\n");
7278     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7279               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
7280 
7281     ctx->cia = pc = ctx->base.pc_next;
7282     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
7283     ctx->base.pc_next = pc += 4;
7284 
7285     if (!is_prefix_insn(ctx, insn)) {
7286         ok = (decode_insn32(ctx, insn) ||
7287               decode_legacy(cpu, ctx, insn));
7288     } else if ((pc & 63) == 0) {
7289         /*
7290          * Power v3.1, section 1.9 Exceptions:
7291          * attempt to execute a prefixed instruction that crosses a
7292          * 64-byte address boundary (system alignment error).
7293          */
7294         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
7295         ok = true;
7296     } else {
7297         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
7298                                              need_byteswap(ctx));
7299         ctx->base.pc_next = pc += 4;
7300         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
7301     }
7302     if (!ok) {
7303         gen_invalid(ctx);
7304     }
7305 
7306     /* End the TB when crossing a page boundary. */
7307     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
7308         ctx->base.is_jmp = DISAS_TOO_MANY;
7309     }
7310 }
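/*
 * Boundary example (illustration only): if the prefix word of a prefixed
 * instruction sits in the last word of a 64-byte block, say at 0x...7c,
 * then pc is 0x...80 after the first fetch above, (pc & 63) == 0, and the
 * suffix word would cross the 64-byte boundary; the code therefore raises
 * the alignment interrupt instead of fetching a second word.
 */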
7311 
7312 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7313 {
7314     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7315     DisasJumpType is_jmp = ctx->base.is_jmp;
7316     target_ulong nip = ctx->base.pc_next;
7317 
7318     if (is_jmp == DISAS_NORETURN) {
7319         /* We have already exited the TB. */
7320         return;
7321     }
7322 
7323     /* Honor single stepping. */
7324     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7325         && (nip <= 0x100 || nip > 0xf00)) {
7326         switch (is_jmp) {
7327         case DISAS_TOO_MANY:
7328         case DISAS_EXIT_UPDATE:
7329         case DISAS_CHAIN_UPDATE:
7330             gen_update_nip(ctx, nip);
7331             break;
7332         case DISAS_EXIT:
7333         case DISAS_CHAIN:
7334             break;
7335         default:
7336             g_assert_not_reached();
7337         }
7338 
7339         gen_debug_exception(ctx);
7340         return;
7341     }
7342 
7343     switch (is_jmp) {
7344     case DISAS_TOO_MANY:
7345         if (use_goto_tb(ctx, nip)) {
7346             pmu_count_insns(ctx);
7347             tcg_gen_goto_tb(0);
7348             gen_update_nip(ctx, nip);
7349             tcg_gen_exit_tb(ctx->base.tb, 0);
7350             break;
7351         }
7352         /* fall through */
7353     case DISAS_CHAIN_UPDATE:
7354         gen_update_nip(ctx, nip);
7355         /* fall through */
7356     case DISAS_CHAIN:
7357         /*
7358          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7359          * CF_NO_GOTO_PTR is set. Count insns now.
7360          */
7361         if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
7362             pmu_count_insns(ctx);
7363         }
7364 
7365         tcg_gen_lookup_and_goto_ptr();
7366         break;
7367 
7368     case DISAS_EXIT_UPDATE:
7369         gen_update_nip(ctx, nip);
7370         /* fall through */
7371     case DISAS_EXIT:
7372         pmu_count_insns(ctx);
7373         tcg_gen_exit_tb(NULL, 0);
7374         break;
7375 
7376     default:
7377         g_assert_not_reached();
7378     }
7379 }
7380 
7381 static void ppc_tr_disas_log(const DisasContextBase *dcbase,
7382                              CPUState *cs, FILE *logfile)
7383 {
7384     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
7385     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
7386 }
7387 
7388 static const TranslatorOps ppc_tr_ops = {
7389     .init_disas_context = ppc_tr_init_disas_context,
7390     .tb_start           = ppc_tr_tb_start,
7391     .insn_start         = ppc_tr_insn_start,
7392     .translate_insn     = ppc_tr_translate_insn,
7393     .tb_stop            = ppc_tr_tb_stop,
7394     .disas_log          = ppc_tr_disas_log,
7395 };
7396 
7397 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
7398                            target_ulong pc, void *host_pc)
7399 {
7400     DisasContext ctx;
7401 
7402     translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
7403 }
7404