xref: /openbmc/qemu/target/ppc/translate.c (revision 21063bce)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 #include "power8-pmu.h"
40 
41 #include "qemu/qemu-print.h"
42 #include "qapi/error.h"
43 
44 #define CPU_SINGLE_STEP 0x1
45 #define CPU_BRANCH_STEP 0x2
46 
47 /* Include definitions for instruction classes and implementation flags */
48 /* #define PPC_DEBUG_DISAS */
49 
50 #ifdef PPC_DEBUG_DISAS
51 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
52 #else
53 #  define LOG_DISAS(...) do { } while (0)
54 #endif
55 /*****************************************************************************/
56 /* Code translation helpers                                                  */
57 
58 /* global register indexes */
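/*
 * Sizing note (matching the loop in ppc_translate_init() below): every
 * name is stored NUL-terminated, so "r0".."r9" need 3 bytes each and
 * "r10".."r31" need 4; the SPE high halves "r0H".."r31H" need 4 or 5;
 * and "crf0".."crf7" need 5 each.
 */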
59 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
60                           + 10 * 4 + 22 * 5 /* SPE GPRh */
61                           + 8 * 5           /* CRF */];
62 static TCGv cpu_gpr[32];
63 static TCGv cpu_gprh[32];
64 static TCGv_i32 cpu_crf[8];
65 static TCGv cpu_nip;
66 static TCGv cpu_msr;
67 static TCGv cpu_ctr;
68 static TCGv cpu_lr;
69 #if defined(TARGET_PPC64)
70 static TCGv cpu_cfar;
71 #endif
72 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
73 static TCGv cpu_reserve;
74 static TCGv cpu_reserve_val;
75 static TCGv cpu_reserve_val2;
76 static TCGv cpu_fpscr;
77 static TCGv_i32 cpu_access_type;
78 
79 #include "exec/gen-icount.h"
80 
81 void ppc_translate_init(void)
82 {
83     int i;
84     char *p;
85     size_t cpu_reg_names_size;
86 
87     p = cpu_reg_names;
88     cpu_reg_names_size = sizeof(cpu_reg_names);
89 
90     for (i = 0; i < 8; i++) {
91         snprintf(p, cpu_reg_names_size, "crf%d", i);
92         cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
93                                             offsetof(CPUPPCState, crf[i]), p);
94         p += 5;
95         cpu_reg_names_size -= 5;
96     }
97 
98     for (i = 0; i < 32; i++) {
99         snprintf(p, cpu_reg_names_size, "r%d", i);
100         cpu_gpr[i] = tcg_global_mem_new(cpu_env,
101                                         offsetof(CPUPPCState, gpr[i]), p);
102         p += (i < 10) ? 3 : 4;
103         cpu_reg_names_size -= (i < 10) ? 3 : 4;
104         snprintf(p, cpu_reg_names_size, "r%dH", i);
105         cpu_gprh[i] = tcg_global_mem_new(cpu_env,
106                                          offsetof(CPUPPCState, gprh[i]), p);
107         p += (i < 10) ? 4 : 5;
108         cpu_reg_names_size -= (i < 10) ? 4 : 5;
109     }
110 
111     cpu_nip = tcg_global_mem_new(cpu_env,
112                                  offsetof(CPUPPCState, nip), "nip");
113 
114     cpu_msr = tcg_global_mem_new(cpu_env,
115                                  offsetof(CPUPPCState, msr), "msr");
116 
117     cpu_ctr = tcg_global_mem_new(cpu_env,
118                                  offsetof(CPUPPCState, ctr), "ctr");
119 
120     cpu_lr = tcg_global_mem_new(cpu_env,
121                                 offsetof(CPUPPCState, lr), "lr");
122 
123 #if defined(TARGET_PPC64)
124     cpu_cfar = tcg_global_mem_new(cpu_env,
125                                   offsetof(CPUPPCState, cfar), "cfar");
126 #endif
127 
128     cpu_xer = tcg_global_mem_new(cpu_env,
129                                  offsetof(CPUPPCState, xer), "xer");
130     cpu_so = tcg_global_mem_new(cpu_env,
131                                 offsetof(CPUPPCState, so), "SO");
132     cpu_ov = tcg_global_mem_new(cpu_env,
133                                 offsetof(CPUPPCState, ov), "OV");
134     cpu_ca = tcg_global_mem_new(cpu_env,
135                                 offsetof(CPUPPCState, ca), "CA");
136     cpu_ov32 = tcg_global_mem_new(cpu_env,
137                                   offsetof(CPUPPCState, ov32), "OV32");
138     cpu_ca32 = tcg_global_mem_new(cpu_env,
139                                   offsetof(CPUPPCState, ca32), "CA32");
140 
141     cpu_reserve = tcg_global_mem_new(cpu_env,
142                                      offsetof(CPUPPCState, reserve_addr),
143                                      "reserve_addr");
144     cpu_reserve_val = tcg_global_mem_new(cpu_env,
145                                          offsetof(CPUPPCState, reserve_val),
146                                          "reserve_val");
147     cpu_reserve_val2 = tcg_global_mem_new(cpu_env,
148                                           offsetof(CPUPPCState, reserve_val2),
149                                           "reserve_val2");
150 
151     cpu_fpscr = tcg_global_mem_new(cpu_env,
152                                    offsetof(CPUPPCState, fpscr), "fpscr");
153 
154     cpu_access_type = tcg_global_mem_new_i32(cpu_env,
155                                              offsetof(CPUPPCState, access_type),
156                                              "access_type");
157 }
158 
159 /* internal defines */
160 struct DisasContext {
161     DisasContextBase base;
162     target_ulong cia;  /* current instruction address */
163     uint32_t opcode;
164     /* MSR bits sampled at translation time: PR, HV, DR and LE */
165     bool pr, hv, dr, le_mode;
166     bool lazy_tlb_flush;
167     bool need_access_type;
168     int mem_idx;
169     int access_type;
170     /* Translation flags */
171     MemOp default_tcg_memop_mask;
172 #if defined(TARGET_PPC64)
173     bool sf_mode;
174     bool has_cfar;
175 #endif
176     bool fpu_enabled;
177     bool altivec_enabled;
178     bool vsx_enabled;
179     bool spe_enabled;
180     bool tm_enabled;
181     bool gtse;
182     bool hr;
183     bool mmcr0_pmcc0;
184     bool mmcr0_pmcc1;
185     bool mmcr0_pmcjce;
186     bool pmc_other;
187     bool pmu_insn_cnt;
188     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
189     int singlestep_enabled;
190     uint32_t flags;
191     uint64_t insns_flags;
192     uint64_t insns_flags2;
193 };
194 
195 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
196 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
197 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
198 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
199 
200 /* Return true iff byteswap is needed in a scalar memop */
201 static inline bool need_byteswap(const DisasContext *ctx)
202 {
203 #if TARGET_BIG_ENDIAN
204      return ctx->le_mode;
205 #else
206      return !ctx->le_mode;
207 #endif
208 }
209 
210 /* True when active word size < size of target_long.  */
211 #ifdef TARGET_PPC64
212 # define NARROW_MODE(C)  (!(C)->sf_mode)
213 #else
214 # define NARROW_MODE(C)  0
215 #endif
216 
217 struct opc_handler_t {
218     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
219     uint32_t inval1;
220     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
221     uint32_t inval2;
222     /* instruction type */
223     uint64_t type;
224     /* extended instruction type */
225     uint64_t type2;
226     /* handler */
227     void (*handler)(DisasContext *ctx);
228 };
229 
230 /* SPR load/store helpers */
231 static inline void gen_load_spr(TCGv t, int reg)
232 {
233     tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
234 }
235 
236 static inline void gen_store_spr(int reg, TCGv t)
237 {
238     tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
239 }
240 
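/*
 * env->access_type is updated lazily: most instructions in a block share
 * the same access type, so a store is only emitted when the value
 * actually changes.
 */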
241 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
242 {
243     if (ctx->need_access_type && ctx->access_type != access_type) {
244         tcg_gen_movi_i32(cpu_access_type, access_type);
245         ctx->access_type = access_type;
246     }
247 }
248 
249 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
250 {
251     if (NARROW_MODE(ctx)) {
252         nip = (uint32_t)nip;
253     }
254     tcg_gen_movi_tl(cpu_nip, nip);
255 }
256 
257 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
258 {
259     TCGv_i32 t0, t1;
260 
261     /*
262      * These are all synchronous exceptions; set the PC back to the
263      * faulting instruction.
264      */
265     gen_update_nip(ctx, ctx->cia);
266     t0 = tcg_const_i32(excp);
267     t1 = tcg_const_i32(error);
268     gen_helper_raise_exception_err(cpu_env, t0, t1);
269     tcg_temp_free_i32(t0);
270     tcg_temp_free_i32(t1);
271     ctx->base.is_jmp = DISAS_NORETURN;
272 }
273 
274 static void gen_exception(DisasContext *ctx, uint32_t excp)
275 {
276     TCGv_i32 t0;
277 
278     /*
279      * These are all synchronous exceptions; set the PC back to the
280      * faulting instruction.
281      */
282     gen_update_nip(ctx, ctx->cia);
283     t0 = tcg_const_i32(excp);
284     gen_helper_raise_exception(cpu_env, t0);
285     tcg_temp_free_i32(t0);
286     ctx->base.is_jmp = DISAS_NORETURN;
287 }
288 
289 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
290                               target_ulong nip)
291 {
292     TCGv_i32 t0;
293 
294     gen_update_nip(ctx, nip);
295     t0 = tcg_const_i32(excp);
296     gen_helper_raise_exception(cpu_env, t0);
297     tcg_temp_free_i32(t0);
298     ctx->base.is_jmp = DISAS_NORETURN;
299 }
300 
301 static void gen_icount_io_start(DisasContext *ctx)
302 {
303     if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
304         gen_io_start();
305         /*
306          * An I/O instruction must be last in the TB.
307          * Chain to the next TB, and let the code from gen_tb_start
308          * decide if we need to return to the main loop.
309          * Doing this first also allows this value to be overridden.
310          */
311         ctx->base.is_jmp = DISAS_TOO_MANY;
312     }
313 }
314 
315 #if !defined(CONFIG_USER_ONLY)
316 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
317 {
318     gen_icount_io_start(ctx);
319     gen_helper_ppc_maybe_interrupt(cpu_env);
320 }
321 #endif
322 
323 /*
324  * Tells the caller what is the appropriate exception to generate and prepares
325  * SPR registers for this exception.
326  *
327  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
328  * POWERPC_EXCP_DEBUG (on BookE).
329  */
330 static uint32_t gen_prep_dbgex(DisasContext *ctx)
331 {
332     if (ctx->flags & POWERPC_FLAG_DE) {
333         target_ulong dbsr = 0;
334         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
335             dbsr = DBCR0_ICMP;
336         } else {
337             /* Must have been a branch */
338             dbsr = DBCR0_BRT;
339         }
340         TCGv t0 = tcg_temp_new();
341         gen_load_spr(t0, SPR_BOOKE_DBSR);
342         tcg_gen_ori_tl(t0, t0, dbsr);
343         gen_store_spr(SPR_BOOKE_DBSR, t0);
344         tcg_temp_free(t0);
345         return POWERPC_EXCP_DEBUG;
346     } else {
347         return POWERPC_EXCP_TRACE;
348     }
349 }
350 
351 static void gen_debug_exception(DisasContext *ctx)
352 {
353     gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
354     ctx->base.is_jmp = DISAS_NORETURN;
355 }
356 
357 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
358 {
359     /* Will be converted to program check if needed */
360     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
361 }
362 
363 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
364 {
365     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
366 }
367 
368 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
369 {
370     /* Will be converted to program check if needed */
371     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
372 }
373 
374 /*****************************************************************************/
375 /* SPR READ/WRITE CALLBACKS */
376 
377 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
378 {
379 #if 0
380     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
381     printf("ERROR: try to access SPR %d !\n", sprn);
382 #endif
383 }
384 
385 /* #define PPC_DUMP_SPR_ACCESSES */
386 
387 /*
388  * Generic callbacks:
389  * do nothing but store/retrieve the SPR value
390  */
391 static void spr_load_dump_spr(int sprn)
392 {
393 #ifdef PPC_DUMP_SPR_ACCESSES
394     TCGv_i32 t0 = tcg_const_i32(sprn);
395     gen_helper_load_dump_spr(cpu_env, t0);
396     tcg_temp_free_i32(t0);
397 #endif
398 }
399 
400 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
401 {
402     gen_load_spr(cpu_gpr[gprn], sprn);
403     spr_load_dump_spr(sprn);
404 }
405 
406 static void spr_store_dump_spr(int sprn)
407 {
408 #ifdef PPC_DUMP_SPR_ACCESSES
409     TCGv_i32 t0 = tcg_const_i32(sprn);
410     gen_helper_store_dump_spr(cpu_env, t0);
411     tcg_temp_free_i32(t0);
412 #endif
413 }
414 
415 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
416 {
417     gen_store_spr(sprn, cpu_gpr[gprn]);
418     spr_store_dump_spr(sprn);
419 }
420 
421 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
422 {
423     spr_write_generic(ctx, sprn, gprn);
424 
425     /*
426      * SPR_CTRL writes must force a new translation block,
427      * allowing the PMU to calculate the run latch events with
428      * more accuracy.
429      */
430     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
431 }
432 
433 #if !defined(CONFIG_USER_ONLY)
434 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
435 {
436 #ifdef TARGET_PPC64
437     TCGv t0 = tcg_temp_new();
438     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
439     gen_store_spr(sprn, t0);
440     tcg_temp_free(t0);
441     spr_store_dump_spr(sprn);
442 #else
443     spr_write_generic(ctx, sprn, gprn);
444 #endif
445 }
446 
447 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
448 {
449     TCGv t0 = tcg_temp_new();
450     TCGv t1 = tcg_temp_new();
451     gen_load_spr(t0, sprn);
452     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
453     tcg_gen_and_tl(t0, t0, t1);
454     gen_store_spr(sprn, t0);
455     tcg_temp_free(t0);
456     tcg_temp_free(t1);
457 }
458 
459 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
460 {
461 }
462 
463 #endif
464 
465 /* SPR common to all PowerPC */
466 /* XER */
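/*
 * QEMU keeps SO/OV/CA (and, for ISA v3.0, OV32/CA32) in dedicated
 * globals rather than inside cpu_xer so that flag updates stay cheap.
 * Reading XER therefore reassembles the architected value here, and
 * spr_write_xer() below splits it back out.
 */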
467 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
468 {
469     TCGv dst = cpu_gpr[gprn];
470     TCGv t0 = tcg_temp_new();
471     TCGv t1 = tcg_temp_new();
472     TCGv t2 = tcg_temp_new();
473     tcg_gen_mov_tl(dst, cpu_xer);
474     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
475     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
476     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
477     tcg_gen_or_tl(t0, t0, t1);
478     tcg_gen_or_tl(dst, dst, t2);
479     tcg_gen_or_tl(dst, dst, t0);
480     if (is_isa300(ctx)) {
481         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
482         tcg_gen_or_tl(dst, dst, t0);
483         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
484         tcg_gen_or_tl(dst, dst, t0);
485     }
486     tcg_temp_free(t0);
487     tcg_temp_free(t1);
488     tcg_temp_free(t2);
489 }
490 
491 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
492 {
493     TCGv src = cpu_gpr[gprn];
494     /* Write all flags; only the read side checks for isa300 */
495     tcg_gen_andi_tl(cpu_xer, src,
496                     ~((1u << XER_SO) |
497                       (1u << XER_OV) | (1u << XER_OV32) |
498                       (1u << XER_CA) | (1u << XER_CA32)));
499     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
500     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
501     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
502     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
503     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
504 }
505 
506 /* LR */
507 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
508 {
509     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
510 }
511 
512 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
513 {
514     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
515 }
516 
517 /* CFAR */
518 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
519 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
520 {
521     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
522 }
523 
524 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
525 {
526     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
527 }
528 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
529 
530 /* CTR */
531 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
532 {
533     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
534 }
535 
536 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
537 {
538     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
539 }
540 
541 /* User read access to SPR */
542 /* USPRx */
543 /* UMMCRx */
544 /* UPMCx */
545 /* USIA */
546 /* UDECR */
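/*
 * The user-mode aliases simply access the privileged SPR located 16
 * numbers above (e.g. UMMCR0 reads MMCR0), hence the "+ 0x10".
 */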
547 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
548 {
549     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
550 }
551 
552 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
553 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
554 {
555     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
556 }
557 #endif
558 
559 /* SPR common to all non-embedded PowerPC */
560 /* DECR */
561 #if !defined(CONFIG_USER_ONLY)
562 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
563 {
564     gen_icount_io_start(ctx);
565     gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
566 }
567 
568 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
569 {
570     gen_icount_io_start(ctx);
571     gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
572 }
573 #endif
574 
575 /* SPR common to all non-embedded PowerPC, except 601 */
576 /* Time base */
577 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
578 {
579     gen_icount_io_start(ctx);
580     gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
581 }
582 
583 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
584 {
585     gen_icount_io_start(ctx);
586     gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
587 }
588 
589 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
590 {
591     gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
592 }
593 
594 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
595 {
596     gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
597 }
598 
599 #if !defined(CONFIG_USER_ONLY)
600 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
601 {
602     gen_icount_io_start(ctx);
603     gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
604 }
605 
606 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
607 {
608     gen_icount_io_start(ctx);
609     gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
610 }
611 
612 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
613 {
614     gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
615 }
616 
617 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
618 {
619     gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
620 }
621 
622 #if defined(TARGET_PPC64)
623 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
624 {
625     gen_icount_io_start(ctx);
626     gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
627 }
628 
629 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
630 {
631     gen_icount_io_start(ctx);
632     gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
633 }
634 
635 /* HDECR */
636 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
637 {
638     gen_icount_io_start(ctx);
639     gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
640 }
641 
642 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
643 {
644     gen_icount_io_start(ctx);
645     gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
646 }
647 
648 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
649 {
650     gen_icount_io_start(ctx);
651     gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
652 }
653 
654 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
655 {
656     gen_icount_io_start(ctx);
657     gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
658 }
659 
660 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
661 {
662     gen_icount_io_start(ctx);
663     gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
664 }
665 
666 #endif
667 #endif
668 
669 #if !defined(CONFIG_USER_ONLY)
670 /* IBAT0U...IBAT7U */
671 /* IBAT0L...IBAT7L */
672 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
673 {
674     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
675                   offsetof(CPUPPCState,
676                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
677 }
678 
679 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
680 {
681     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
682                   offsetof(CPUPPCState,
683                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
684 }
685 
686 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
687 {
688     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
689     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
690     tcg_temp_free_i32(t0);
691 }
692 
693 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
694 {
695     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
696     gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
697     tcg_temp_free_i32(t0);
698 }
699 
700 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
701 {
702     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
703     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
704     tcg_temp_free_i32(t0);
705 }
706 
707 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
708 {
709     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
710     gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
711     tcg_temp_free_i32(t0);
712 }
713 
714 /* DBAT0U...DBAT7U */
715 /* DBAT0L...DBAT7L */
716 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
717 {
718     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
719                   offsetof(CPUPPCState,
720                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
721 }
722 
723 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
724 {
725     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
726                   offsetof(CPUPPCState,
727                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
728 }
729 
730 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
731 {
732     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
733     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
734     tcg_temp_free_i32(t0);
735 }
736 
737 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
738 {
739     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
740     gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
741     tcg_temp_free_i32(t0);
742 }
743 
744 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
745 {
746     TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
747     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
748     tcg_temp_free_i32(t0);
749 }
750 
751 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
752 {
753     TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
754     gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
755     tcg_temp_free_i32(t0);
756 }
757 
758 /* SDR1 */
759 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
760 {
761     gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
762 }
763 
764 #if defined(TARGET_PPC64)
765 /* 64 bits PowerPC specific SPRs */
766 /* PIDR */
767 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
768 {
769     gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
770 }
771 
772 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
773 {
774     gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
775 }
776 
777 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
778 {
779     tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
780 }
781 
782 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
783 {
784     TCGv t0 = tcg_temp_new();
785     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
786     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
787     tcg_temp_free(t0);
788 }
789 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
790 {
791     gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
792 }
793 
794 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
795 {
796     gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
797 }
798 
799 /* DPDES */
800 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
801 {
802     gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
803 }
804 
805 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
806 {
807     gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
808 }
809 #endif
810 #endif
811 
812 /* PowerPC 40x specific registers */
813 #if !defined(CONFIG_USER_ONLY)
814 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
815 {
816     gen_icount_io_start(ctx);
817     gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
818 }
819 
820 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
821 {
822     gen_icount_io_start(ctx);
823     gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
824 }
825 
826 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
827 {
828     gen_icount_io_start(ctx);
829     gen_store_spr(sprn, cpu_gpr[gprn]);
830     gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
831     /* We must stop translation as we may have rebooted */
832     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
833 }
834 
835 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
836 {
837     gen_icount_io_start(ctx);
838     gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
839 }
840 
841 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
842 {
843     gen_icount_io_start(ctx);
844     gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
845 }
846 
847 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
848 {
849     gen_icount_io_start(ctx);
850     gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
851 }
852 
853 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
854 {
855     TCGv t0 = tcg_temp_new();
856     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
857     gen_helper_store_40x_pid(cpu_env, t0);
858     tcg_temp_free(t0);
859 }
860 
861 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
862 {
863     gen_icount_io_start(ctx);
864     gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
865 }
866 
867 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
868 {
869     gen_icount_io_start(ctx);
870     gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
871 }
872 #endif
873 
874 /* PIR */
875 #if !defined(CONFIG_USER_ONLY)
876 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
877 {
878     TCGv t0 = tcg_temp_new();
879     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
880     gen_store_spr(SPR_PIR, t0);
881     tcg_temp_free(t0);
882 }
883 #endif
884 
885 /* SPE specific registers */
886 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
887 {
888     TCGv_i32 t0 = tcg_temp_new_i32();
889     tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
890     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
891     tcg_temp_free_i32(t0);
892 }
893 
894 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
895 {
896     TCGv_i32 t0 = tcg_temp_new_i32();
897     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
898     tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
899     tcg_temp_free_i32(t0);
900 }
901 
902 #if !defined(CONFIG_USER_ONLY)
903 /* Callback used to write the exception vector base */
904 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
905 {
906     TCGv t0 = tcg_temp_new();
907     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
908     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
909     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
910     gen_store_spr(sprn, t0);
911     tcg_temp_free(t0);
912 }
913 
914 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
915 {
916     int sprn_offs;
917 
918     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
919         sprn_offs = sprn - SPR_BOOKE_IVOR0;
920     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
921         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
922     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
923         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
924     } else {
925         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
926                       " vector 0x%03x\n", sprn);
927         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
928         return;
929     }
930 
931     TCGv t0 = tcg_temp_new();
932     tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
933     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
934     tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
935     gen_store_spr(sprn, t0);
936     tcg_temp_free(t0);
937 }
938 #endif
939 
940 #ifdef TARGET_PPC64
941 #ifndef CONFIG_USER_ONLY
942 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
943 {
944     TCGv t0 = tcg_temp_new();
945     TCGv t1 = tcg_temp_new();
946     TCGv t2 = tcg_temp_new();
947 
948     /*
949      * Note, the HV=1 PR=0 case is handled earlier by simply using
950      * spr_write_generic for HV mode in the SPR table
951      */
952 
953     /* Build insertion mask into t1 based on context */
954     if (ctx->pr) {
955         gen_load_spr(t1, SPR_UAMOR);
956     } else {
957         gen_load_spr(t1, SPR_AMOR);
958     }
959 
960     /* Mask new bits into t2 */
961     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
962 
963     /* Load AMR and clear new bits in t0 */
964     gen_load_spr(t0, SPR_AMR);
965     tcg_gen_andc_tl(t0, t0, t1);
966 
967     /* OR in the new bits and write the result out */
968     tcg_gen_or_tl(t0, t0, t2);
969     gen_store_spr(SPR_AMR, t0);
970     spr_store_dump_spr(SPR_AMR);
971 
972     tcg_temp_free(t0);
973     tcg_temp_free(t1);
974     tcg_temp_free(t2);
975 }
976 
977 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
978 {
979     TCGv t0 = tcg_temp_new();
980     TCGv t1 = tcg_temp_new();
981     TCGv t2 = tcg_temp_new();
982 
983     /*
984      * Note, the HV=1 case is handled earlier by simply using
985      * spr_write_generic for HV mode in the SPR table
986      */
987 
988     /* Build insertion mask into t1 based on context */
989     gen_load_spr(t1, SPR_AMOR);
990 
991     /* Mask new bits into t2 */
992     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
993 
994     /* Load AMR and clear new bits in t0 */
995     gen_load_spr(t0, SPR_UAMOR);
996     tcg_gen_andc_tl(t0, t0, t1);
997 
998     /* OR in the new bits and write the result out */
999     tcg_gen_or_tl(t0, t0, t2);
1000     gen_store_spr(SPR_UAMOR, t0);
1001     spr_store_dump_spr(SPR_UAMOR);
1002 
1003     tcg_temp_free(t0);
1004     tcg_temp_free(t1);
1005     tcg_temp_free(t2);
1006 }
1007 
1008 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1009 {
1010     TCGv t0 = tcg_temp_new();
1011     TCGv t1 = tcg_temp_new();
1012     TCGv t2 = tcg_temp_new();
1013 
1014     /*
1015      * Note, the HV=1 case is handled earlier by simply using
1016      * spr_write_generic for HV mode in the SPR table
1017      */
1018 
1019     /* Build insertion mask into t1 based on context */
1020     gen_load_spr(t1, SPR_AMOR);
1021 
1022     /* Mask new bits into t2 */
1023     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1024 
1025     /* Load AMR and clear new bits in t0 */
1026     gen_load_spr(t0, SPR_IAMR);
1027     tcg_gen_andc_tl(t0, t0, t1);
1028 
1029     /* OR in the new bits and write the result out */
1030     tcg_gen_or_tl(t0, t0, t2);
1031     gen_store_spr(SPR_IAMR, t0);
1032     spr_store_dump_spr(SPR_IAMR);
1033 
1034     tcg_temp_free(t0);
1035     tcg_temp_free(t1);
1036     tcg_temp_free(t2);
1037 }
1038 #endif
1039 #endif
1040 
1041 #ifndef CONFIG_USER_ONLY
1042 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1043 {
1044     gen_helper_fixup_thrm(cpu_env);
1045     gen_load_spr(cpu_gpr[gprn], sprn);
1046     spr_load_dump_spr(sprn);
1047 }
1048 #endif /* !CONFIG_USER_ONLY */
1049 
1050 #if !defined(CONFIG_USER_ONLY)
1051 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1052 {
1053     TCGv t0 = tcg_temp_new();
1054 
1055     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1056     gen_store_spr(sprn, t0);
1057     tcg_temp_free(t0);
1058 }
1059 
1060 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1061 {
1062     TCGv t0 = tcg_temp_new();
1063 
1064     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1065     gen_store_spr(sprn, t0);
1066     tcg_temp_free(t0);
1067 }
1068 
1069 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1070 {
1071     TCGv t0 = tcg_temp_new();
1072 
1073     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1074                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1075     gen_store_spr(sprn, t0);
1076     tcg_temp_free(t0);
1077 }
1078 
1079 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1080 {
1081     gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
1082 }
1083 
1084 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1085 {
1086     TCGv_i32 t0 = tcg_const_i32(sprn);
1087     gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1088     tcg_temp_free_i32(t0);
1089 }
1090 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1091 {
1092     gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
1093 }
1094 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1095 {
1096     gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
1097 }
1098 
1099 #endif
1100 
1101 #if !defined(CONFIG_USER_ONLY)
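/*
 * MAS7_MAS3 is a combined view of two SPRs: MAS7 occupies the upper
 * 32 bits and MAS3 the lower 32 bits, so accesses are split or merged
 * here.
 */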
1102 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1103 {
1104     TCGv val = tcg_temp_new();
1105     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1106     gen_store_spr(SPR_BOOKE_MAS3, val);
1107     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1108     gen_store_spr(SPR_BOOKE_MAS7, val);
1109     tcg_temp_free(val);
1110 }
1111 
1112 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1113 {
1114     TCGv mas7 = tcg_temp_new();
1115     TCGv mas3 = tcg_temp_new();
1116     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1117     tcg_gen_shli_tl(mas7, mas7, 32);
1118     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1119     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1120     tcg_temp_free(mas3);
1121     tcg_temp_free(mas7);
1122 }
1123 
1124 #endif
1125 
1126 #ifdef TARGET_PPC64
1127 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1128                                     int bit, int sprn, int cause)
1129 {
1130     TCGv_i32 t1 = tcg_const_i32(bit);
1131     TCGv_i32 t2 = tcg_const_i32(sprn);
1132     TCGv_i32 t3 = tcg_const_i32(cause);
1133 
1134     gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1135 
1136     tcg_temp_free_i32(t3);
1137     tcg_temp_free_i32(t2);
1138     tcg_temp_free_i32(t1);
1139 }
1140 
1141 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1142                                    int bit, int sprn, int cause)
1143 {
1144     TCGv_i32 t1 = tcg_const_i32(bit);
1145     TCGv_i32 t2 = tcg_const_i32(sprn);
1146     TCGv_i32 t3 = tcg_const_i32(cause);
1147 
1148     gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1149 
1150     tcg_temp_free_i32(t3);
1151     tcg_temp_free_i32(t2);
1152     tcg_temp_free_i32(t1);
1153 }
1154 
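/*
 * The *_prev_upper32 callbacks implement the "upper half" SPR aliases,
 * which expose the high 32 bits of the SPR numbered one below (e.g.
 * TEXASRU aliases the top half of TEXASR).
 */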
1155 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1156 {
1157     TCGv spr_up = tcg_temp_new();
1158     TCGv spr = tcg_temp_new();
1159 
1160     gen_load_spr(spr, sprn - 1);
1161     tcg_gen_shri_tl(spr_up, spr, 32);
1162     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1163 
1164     tcg_temp_free(spr);
1165     tcg_temp_free(spr_up);
1166 }
1167 
1168 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1169 {
1170     TCGv spr = tcg_temp_new();
1171 
1172     gen_load_spr(spr, sprn - 1);
1173     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1174     gen_store_spr(sprn - 1, spr);
1175 
1176     tcg_temp_free(spr);
1177 }
1178 
1179 #if !defined(CONFIG_USER_ONLY)
1180 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1181 {
1182     TCGv hmer = tcg_temp_new();
1183 
1184     gen_load_spr(hmer, sprn);
1185     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1186     gen_store_spr(sprn, hmer);
1187     spr_store_dump_spr(sprn);
1188     tcg_temp_free(hmer);
1189 }
1190 
1191 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1192 {
1193     gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
1194 }
1195 #endif /* !defined(CONFIG_USER_ONLY) */
1196 
1197 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1198 {
1199     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1200     spr_read_generic(ctx, gprn, sprn);
1201 }
1202 
1203 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1204 {
1205     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1206     spr_write_generic(ctx, sprn, gprn);
1207 }
1208 
1209 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1210 {
1211     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1212     spr_read_generic(ctx, gprn, sprn);
1213 }
1214 
1215 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1216 {
1217     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1218     spr_write_generic(ctx, sprn, gprn);
1219 }
1220 
1221 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1222 {
1223     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1224     spr_read_prev_upper32(ctx, gprn, sprn);
1225 }
1226 
1227 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1228 {
1229     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1230     spr_write_prev_upper32(ctx, sprn, gprn);
1231 }
1232 
1233 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1234 {
1235     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1236     spr_read_generic(ctx, gprn, sprn);
1237 }
1238 
1239 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1240 {
1241     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1242     spr_write_generic(ctx, sprn, gprn);
1243 }
1244 
1245 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1246 {
1247     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1248     spr_read_prev_upper32(ctx, gprn, sprn);
1249 }
1250 
1251 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1252 {
1253     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1254     spr_write_prev_upper32(ctx, sprn, gprn);
1255 }
1256 
1257 void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1258 {
1259     TCGv t0 = tcg_temp_new();
1260 
1261     /*
1262      * Access to the (H)DEXCR in problem state is done using separate
1263      * SPR indexes, 16 below the SPR indexes that give full access to
1264      * the (H)DEXCR in privileged state. Problem state can only read
1265      * bits 32:63; bits 0:31 read as 0.
1266      *
1267      * See section 9.3.1-9.3.2 of PowerISA v3.1B
1268      */
1269 
1270     gen_load_spr(t0, sprn + 16);
1271     tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1272 
1273     tcg_temp_free(t0);
1274 }
1275 #endif
1276 
1277 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1278 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1279 
1280 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1281 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1282 
1283 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1284 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1285 
1286 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1287 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1288 
1289 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1290 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1291 
1292 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1293 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1294 
1295 typedef struct opcode_t {
1296     unsigned char opc1, opc2, opc3, opc4;
1297 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1298     unsigned char pad[4];
1299 #endif
1300     opc_handler_t handler;
1301     const char *oname;
1302 } opcode_t;
1303 
1304 static void gen_priv_opc(DisasContext *ctx)
1305 {
1306     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1307 }
1308 
1309 /* Helpers for priv. check */
1310 #define GEN_PRIV(CTX)              \
1311     do {                           \
1312         gen_priv_opc(CTX); return; \
1313     } while (0)
1314 
1315 #if defined(CONFIG_USER_ONLY)
1316 #define CHK_HV(CTX) GEN_PRIV(CTX)
1317 #define CHK_SV(CTX) GEN_PRIV(CTX)
1318 #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1319 #else
1320 #define CHK_HV(CTX)                         \
1321     do {                                    \
1322         if (unlikely(ctx->pr || !ctx->hv)) {\
1323             GEN_PRIV(CTX);                  \
1324         }                                   \
1325     } while (0)
1326 #define CHK_SV(CTX)              \
1327     do {                         \
1328         if (unlikely(ctx->pr)) { \
1329             GEN_PRIV(CTX);       \
1330         }                        \
1331     } while (0)
1332 #define CHK_HVRM(CTX)                                   \
1333     do {                                                \
1334         if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1335             GEN_PRIV(CTX);                              \
1336         }                                               \
1337     } while (0)
1338 #endif
1339 
1340 #define CHK_NONE(CTX)
1341 
1342 /*****************************************************************************/
1343 /* PowerPC instructions table                                                */
1344 
1345 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1346 {                                                                             \
1347     .opc1 = op1,                                                              \
1348     .opc2 = op2,                                                              \
1349     .opc3 = op3,                                                              \
1350     .opc4 = 0xff,                                                             \
1351     .handler = {                                                              \
1352         .inval1  = invl,                                                      \
1353         .type = _typ,                                                         \
1354         .type2 = _typ2,                                                       \
1355         .handler = &gen_##name,                                               \
1356     },                                                                        \
1357     .oname = stringify(name),                                                 \
1358 }
1359 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1360 {                                                                             \
1361     .opc1 = op1,                                                              \
1362     .opc2 = op2,                                                              \
1363     .opc3 = op3,                                                              \
1364     .opc4 = 0xff,                                                             \
1365     .handler = {                                                              \
1366         .inval1  = invl1,                                                     \
1367         .inval2  = invl2,                                                     \
1368         .type = _typ,                                                         \
1369         .type2 = _typ2,                                                       \
1370         .handler = &gen_##name,                                               \
1371     },                                                                        \
1372     .oname = stringify(name),                                                 \
1373 }
1374 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1375 {                                                                             \
1376     .opc1 = op1,                                                              \
1377     .opc2 = op2,                                                              \
1378     .opc3 = op3,                                                              \
1379     .opc4 = 0xff,                                                             \
1380     .handler = {                                                              \
1381         .inval1  = invl,                                                      \
1382         .type = _typ,                                                         \
1383         .type2 = _typ2,                                                       \
1384         .handler = &gen_##name,                                               \
1385     },                                                                        \
1386     .oname = onam,                                                            \
1387 }
1388 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1389 {                                                                             \
1390     .opc1 = op1,                                                              \
1391     .opc2 = op2,                                                              \
1392     .opc3 = op3,                                                              \
1393     .opc4 = op4,                                                              \
1394     .handler = {                                                              \
1395         .inval1  = invl,                                                      \
1396         .type = _typ,                                                         \
1397         .type2 = _typ2,                                                       \
1398         .handler = &gen_##name,                                               \
1399     },                                                                        \
1400     .oname = stringify(name),                                                 \
1401 }
1402 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1403 {                                                                             \
1404     .opc1 = op1,                                                              \
1405     .opc2 = op2,                                                              \
1406     .opc3 = op3,                                                              \
1407     .opc4 = op4,                                                              \
1408     .handler = {                                                              \
1409         .inval1  = invl,                                                      \
1410         .type = _typ,                                                         \
1411         .type2 = _typ2,                                                       \
1412         .handler = &gen_##name,                                               \
1413     },                                                                        \
1414     .oname = onam,                                                            \
1415 }
1416 
1417 /* Invalid instruction */
1418 static void gen_invalid(DisasContext *ctx)
1419 {
1420     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1421 }
1422 
1423 static opc_handler_t invalid_handler = {
1424     .inval1  = 0xFFFFFFFF,
1425     .inval2  = 0xFFFFFFFF,
1426     .type    = PPC_NONE,
1427     .type2   = PPC_NONE,
1428     .handler = gen_invalid,
1429 };
1430 
1431 /***                           Integer comparison                          ***/
1432 
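/*
 * Compare arg0 with arg1 (signed when s != 0) into CR field crf: start
 * from EQ, conditionally overwrite with LT and then GT using movcond,
 * and finally OR in the current SO bit.  As an illustration, a signed
 * doubleword compare into crN boils down to gen_op_cmp(rA, rB, 1, N).
 */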
1433 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1434 {
1435     TCGv t0 = tcg_temp_new();
1436     TCGv t1 = tcg_temp_new();
1437     TCGv_i32 t = tcg_temp_new_i32();
1438 
1439     tcg_gen_movi_tl(t0, CRF_EQ);
1440     tcg_gen_movi_tl(t1, CRF_LT);
1441     tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1442                        t0, arg0, arg1, t1, t0);
1443     tcg_gen_movi_tl(t1, CRF_GT);
1444     tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1445                        t0, arg0, arg1, t1, t0);
1446 
1447     tcg_gen_trunc_tl_i32(t, t0);
1448     tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1449     tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1450 
1451     tcg_temp_free(t0);
1452     tcg_temp_free(t1);
1453     tcg_temp_free_i32(t);
1454 }
1455 
1456 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1457 {
1458     TCGv t0 = tcg_const_tl(arg1);
1459     gen_op_cmp(arg0, t0, s, crf);
1460     tcg_temp_free(t0);
1461 }
1462 
1463 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1464 {
1465     TCGv t0, t1;
1466     t0 = tcg_temp_new();
1467     t1 = tcg_temp_new();
1468     if (s) {
1469         tcg_gen_ext32s_tl(t0, arg0);
1470         tcg_gen_ext32s_tl(t1, arg1);
1471     } else {
1472         tcg_gen_ext32u_tl(t0, arg0);
1473         tcg_gen_ext32u_tl(t1, arg1);
1474     }
1475     gen_op_cmp(t0, t1, s, crf);
1476     tcg_temp_free(t1);
1477     tcg_temp_free(t0);
1478 }
1479 
1480 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1481 {
1482     TCGv t0 = tcg_const_tl(arg1);
1483     gen_op_cmp32(arg0, t0, s, crf);
1484     tcg_temp_free(t0);
1485 }
1486 
1487 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1488 {
1489     if (NARROW_MODE(ctx)) {
1490         gen_op_cmpi32(reg, 0, 1, 0);
1491     } else {
1492         gen_op_cmpi(reg, 0, 1, 0);
1493     }
1494 }
1495 
1496 /* cmprb - range comparison: isupper, isalpha, islower */
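/*
 * cmprb compares the low byte of rA against one (L = 0) or two (L = 1)
 * inclusive byte ranges packed into rB and sets the GT bit of the
 * target CR field when the byte falls inside a range.
 */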
1497 static void gen_cmprb(DisasContext *ctx)
1498 {
1499     TCGv_i32 src1 = tcg_temp_new_i32();
1500     TCGv_i32 src2 = tcg_temp_new_i32();
1501     TCGv_i32 src2lo = tcg_temp_new_i32();
1502     TCGv_i32 src2hi = tcg_temp_new_i32();
1503     TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];
1504 
1505     tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
1506     tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);
1507 
1508     tcg_gen_andi_i32(src1, src1, 0xFF);
1509     tcg_gen_ext8u_i32(src2lo, src2);
1510     tcg_gen_shri_i32(src2, src2, 8);
1511     tcg_gen_ext8u_i32(src2hi, src2);
1512 
1513     tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1514     tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1515     tcg_gen_and_i32(crf, src2lo, src2hi);
1516 
1517     if (ctx->opcode & 0x00200000) {
1518         tcg_gen_shri_i32(src2, src2, 8);
1519         tcg_gen_ext8u_i32(src2lo, src2);
1520         tcg_gen_shri_i32(src2, src2, 8);
1521         tcg_gen_ext8u_i32(src2hi, src2);
1522         tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
1523         tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
1524         tcg_gen_and_i32(src2lo, src2lo, src2hi);
1525         tcg_gen_or_i32(crf, crf, src2lo);
1526     }
1527     tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
1528     tcg_temp_free_i32(src1);
1529     tcg_temp_free_i32(src2);
1530     tcg_temp_free_i32(src2lo);
1531     tcg_temp_free_i32(src2hi);
1532 }
1533 
1534 #if defined(TARGET_PPC64)
1535 /* cmpeqb */
1536 static void gen_cmpeqb(DisasContext *ctx)
1537 {
1538     gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1539                       cpu_gpr[rB(ctx->opcode)]);
1540 }
1541 #endif
1542 
1543 /* isel (PowerPC 2.03 specification) */
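/*
 * isel: rD = CR bit "bi" set ? rA : rB, where rA == 0 selects the
 * constant zero rather than GPR0, as the ISA requires.
 */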
1544 static void gen_isel(DisasContext *ctx)
1545 {
1546     uint32_t bi = rC(ctx->opcode);
1547     uint32_t mask = 0x08 >> (bi & 0x03);
1548     TCGv t0 = tcg_temp_new();
1549     TCGv zr;
1550 
1551     tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
1552     tcg_gen_andi_tl(t0, t0, mask);
1553 
1554     zr = tcg_const_tl(0);
1555     tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
1556                        rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
1557                        cpu_gpr[rB(ctx->opcode)]);
1558     tcg_temp_free(zr);
1559     tcg_temp_free(t0);
1560 }
1561 
1562 /* cmpb: PowerPC 2.05 specification */
1563 static void gen_cmpb(DisasContext *ctx)
1564 {
1565     gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
1566                     cpu_gpr[rB(ctx->opcode)]);
1567 }
1568 
1569 /***                           Integer arithmetic                          ***/
1570 
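/*
 * Compute XER[OV] for a result arg0 produced from arg1 and arg2: signed
 * overflow happened iff arg1 and arg2 agree in sign (disagree for a
 * subtraction) while the result's sign differs from arg2's, i.e. for an
 * addition the top bit of (arg0 ^ arg2) & ~(arg1 ^ arg2).
 */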
1571 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1572                                            TCGv arg1, TCGv arg2, int sub)
1573 {
1574     TCGv t0 = tcg_temp_new();
1575 
1576     tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1577     tcg_gen_xor_tl(t0, arg1, arg2);
1578     if (sub) {
1579         tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1580     } else {
1581         tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1582     }
1583     tcg_temp_free(t0);
1584     if (NARROW_MODE(ctx)) {
1585         tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1586         if (is_isa300(ctx)) {
1587             tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1588         }
1589     } else {
1590         if (is_isa300(ctx)) {
1591             tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1592         }
1593         tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1594     }
1595     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1596 }
1597 
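/*
 * ISA v3.0 CA32: the carry out of the low 32 bits of an addition is
 * bit 32 of res ^ arg0 ^ arg1; for a subtraction one operand is
 * effectively complemented, which is what the eqv below accounts for.
 */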
1598 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1599                                              TCGv res, TCGv arg0, TCGv arg1,
1600                                              TCGv ca32, int sub)
1601 {
1602     TCGv t0;
1603 
1604     if (!is_isa300(ctx)) {
1605         return;
1606     }
1607 
1608     t0 = tcg_temp_new();
1609     if (sub) {
1610         tcg_gen_eqv_tl(t0, arg0, arg1);
1611     } else {
1612         tcg_gen_xor_tl(t0, arg0, arg1);
1613     }
1614     tcg_gen_xor_tl(t0, t0, res);
1615     tcg_gen_extract_tl(ca32, t0, 32, 1);
1616     tcg_temp_free(t0);
1617 }
1618 
1619 /* Common add function */
1620 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1621                                     TCGv arg2, TCGv ca, TCGv ca32,
1622                                     bool add_ca, bool compute_ca,
1623                                     bool compute_ov, bool compute_rc0)
1624 {
1625     TCGv t0 = ret;
1626 
1627     if (compute_ca || compute_ov) {
1628         t0 = tcg_temp_new();
1629     }
1630 
1631     if (compute_ca) {
1632         if (NARROW_MODE(ctx)) {
1633             /*
1634              * Caution: a non-obvious corner case of the spec is that
1635              * we must produce the *entire* 64-bit addition, yet extract
1636              * the carry into bit 32 (bit 32 of sum ^ arg1 ^ arg2).
1637              */
1638             TCGv t1 = tcg_temp_new();
1639             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1640             tcg_gen_add_tl(t0, arg1, arg2);
1641             if (add_ca) {
1642                 tcg_gen_add_tl(t0, t0, ca);
1643             }
1644             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1645             tcg_temp_free(t1);
1646             tcg_gen_extract_tl(ca, ca, 32, 1);
1647             if (is_isa300(ctx)) {
1648                 tcg_gen_mov_tl(ca32, ca);
1649             }
1650         } else {
1651             TCGv zero = tcg_const_tl(0);
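            /*
             * Full-width case: tcg_gen_add2_tl gives us the carry-out
             * directly.  With a carry-in, the sum is formed in two steps
             * (arg1 + ca, then + arg2); at most one of the two partial
             * additions can carry out, so accumulating the carry-outs in
             * the high word "ca" leaves the final CA value there.
             */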
1652             if (add_ca) {
1653                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1654                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1655             } else {
1656                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1657             }
1658             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1659             tcg_temp_free(zero);
1660         }
1661     } else {
1662         tcg_gen_add_tl(t0, arg1, arg2);
1663         if (add_ca) {
1664             tcg_gen_add_tl(t0, t0, ca);
1665         }
1666     }
1667 
1668     if (compute_ov) {
1669         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1670     }
1671     if (unlikely(compute_rc0)) {
1672         gen_set_Rc0(ctx, t0);
1673     }
1674 
1675     if (t0 != ret) {
1676         tcg_gen_mov_tl(ret, t0);
1677         tcg_temp_free(t0);
1678     }
1679 }
1680 /* Add functions with two operands */
1681 #define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
1682 static void glue(gen_, name)(DisasContext *ctx)                               \
1683 {                                                                             \
1684     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1685                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1686                      ca, glue(ca, 32),                                        \
1687                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1688 }
1689 /* Add functions with one operand and one immediate */
1690 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
1691                                 add_ca, compute_ca, compute_ov)               \
1692 static void glue(gen_, name)(DisasContext *ctx)                               \
1693 {                                                                             \
1694     TCGv t0 = tcg_const_tl(const_val);                                        \
1695     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
1696                      cpu_gpr[rA(ctx->opcode)], t0,                            \
1697                      ca, glue(ca, 32),                                        \
1698                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
1699     tcg_temp_free(t0);                                                        \
1700 }
1701 
1702 /* add  add.  addo  addo. */
1703 GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
1704 GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
1705 /* addc  addc.  addco  addco. */
1706 GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
1707 GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
1708 /* adde  adde.  addeo  addeo. */
1709 GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
1710 GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
1711 /* addme  addme.  addmeo  addmeo.  */
1712 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
1713 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
1714 /* addex */
1715 GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
1716 /* addze  addze.  addzeo  addzeo.*/
1717 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
1718 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
1719 /* addic  addic.*/
1720 static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
1721 {
1722     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
1723     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
1724                      c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
1725     tcg_temp_free(c);
1726 }
1727 
1728 static void gen_addic(DisasContext *ctx)
1729 {
1730     gen_op_addic(ctx, 0);
1731 }
1732 
1733 static void gen_addic_(DisasContext *ctx)
1734 {
1735     gen_op_addic(ctx, 1);
1736 }
1737 
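/*
 * 32-bit divide.  The cases the architecture leaves undefined (division
 * by zero, and INT_MIN / -1 for the signed form) are detected into t2 up
 * front; for those the divisor is quietly replaced by 1 so that the host
 * division can never trap, and t2 supplies OV/OV32 when compute_ov is
 * requested.
 */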
1738 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
1739                                      TCGv arg2, int sign, int compute_ov)
1740 {
1741     TCGv_i32 t0 = tcg_temp_new_i32();
1742     TCGv_i32 t1 = tcg_temp_new_i32();
1743     TCGv_i32 t2 = tcg_temp_new_i32();
1744     TCGv_i32 t3 = tcg_temp_new_i32();
1745 
1746     tcg_gen_trunc_tl_i32(t0, arg1);
1747     tcg_gen_trunc_tl_i32(t1, arg2);
1748     if (sign) {
1749         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1750         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1751         tcg_gen_and_i32(t2, t2, t3);
1752         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1753         tcg_gen_or_i32(t2, t2, t3);
1754         tcg_gen_movi_i32(t3, 0);
1755         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1756         tcg_gen_div_i32(t3, t0, t1);
1757         tcg_gen_extu_i32_tl(ret, t3);
1758     } else {
1759         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1760         tcg_gen_movi_i32(t3, 0);
1761         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1762         tcg_gen_divu_i32(t3, t0, t1);
1763         tcg_gen_extu_i32_tl(ret, t3);
1764     }
1765     if (compute_ov) {
1766         tcg_gen_extu_i32_tl(cpu_ov, t2);
1767         if (is_isa300(ctx)) {
1768             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1769         }
1770         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1771     }
1772     tcg_temp_free_i32(t0);
1773     tcg_temp_free_i32(t1);
1774     tcg_temp_free_i32(t2);
1775     tcg_temp_free_i32(t3);
1776 
1777     if (unlikely(Rc(ctx->opcode) != 0)) {
1778         gen_set_Rc0(ctx, ret);
1779     }
1780 }
1781 /* Div functions */
1782 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
1783 static void glue(gen_, name)(DisasContext *ctx)                               \
1784 {                                                                             \
1785     gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1786                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
1787                      sign, compute_ov);                                       \
1788 }
1789 /* divwu  divwu.  divwuo  divwuo.   */
1790 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
1791 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
1792 /* divw  divw.  divwo  divwo.   */
1793 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
1794 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1795 
1796 /* div[wd]eu[o][.] */
1797 #define GEN_DIVE(name, hlpr, compute_ov)                                      \
1798 static void gen_##name(DisasContext *ctx)                                     \
1799 {                                                                             \
1800     TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
1801     gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
1802                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
1803     tcg_temp_free_i32(t0);                                                    \
1804     if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
1805         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
1806     }                                                                         \
1807 }
1808 
1809 GEN_DIVE(divweu, divweu, 0);
1810 GEN_DIVE(divweuo, divweu, 1);
1811 GEN_DIVE(divwe, divwe, 0);
1812 GEN_DIVE(divweo, divwe, 1);
1813 
1814 #if defined(TARGET_PPC64)
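/*
 * 64-bit divide: same structure as gen_op_arith_divw, with INT64_MIN / -1
 * and division by zero steered onto a safe divisor and flagged via t2.
 */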
1815 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
1816                                      TCGv arg2, int sign, int compute_ov)
1817 {
1818     TCGv_i64 t0 = tcg_temp_new_i64();
1819     TCGv_i64 t1 = tcg_temp_new_i64();
1820     TCGv_i64 t2 = tcg_temp_new_i64();
1821     TCGv_i64 t3 = tcg_temp_new_i64();
1822 
1823     tcg_gen_mov_i64(t0, arg1);
1824     tcg_gen_mov_i64(t1, arg2);
1825     if (sign) {
1826         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1827         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1828         tcg_gen_and_i64(t2, t2, t3);
1829         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1830         tcg_gen_or_i64(t2, t2, t3);
1831         tcg_gen_movi_i64(t3, 0);
1832         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1833         tcg_gen_div_i64(ret, t0, t1);
1834     } else {
1835         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1836         tcg_gen_movi_i64(t3, 0);
1837         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1838         tcg_gen_divu_i64(ret, t0, t1);
1839     }
1840     if (compute_ov) {
1841         tcg_gen_mov_tl(cpu_ov, t2);
1842         if (is_isa300(ctx)) {
1843             tcg_gen_mov_tl(cpu_ov32, t2);
1844         }
1845         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1846     }
1847     tcg_temp_free_i64(t0);
1848     tcg_temp_free_i64(t1);
1849     tcg_temp_free_i64(t2);
1850     tcg_temp_free_i64(t3);
1851 
1852     if (unlikely(Rc(ctx->opcode) != 0)) {
1853         gen_set_Rc0(ctx, ret);
1854     }
1855 }
1856 
1857 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
1858 static void glue(gen_, name)(DisasContext *ctx)                               \
1859 {                                                                             \
1860     gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
1861                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
1862                       sign, compute_ov);                                      \
1863 }
1864 /* divdu  divdu.  divduo  divduo.   */
1865 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
1866 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
1867 /* divd  divd.  divdo  divdo.   */
1868 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
1869 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);
1870 
1871 GEN_DIVE(divdeu, divdeu, 0);
1872 GEN_DIVE(divdeuo, divdeu, 1);
1873 GEN_DIVE(divde, divde, 0);
1874 GEN_DIVE(divdeo, divde, 1);
1875 #endif
1876 
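/*
 * 32-bit modulo (ISA v3.00 moduw/modsw).  As in the divide helpers, the
 * divisor is forced to a harmless value before tcg_gen_rem*_i32 runs
 * (zero, and INT_MIN / -1 for the signed form, could otherwise trap on
 * the host); the architected result for those inputs is undefined.
 * These instructions have no record form and do not touch XER, hence no
 * gen_set_Rc0 here.
 */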
1877 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1878                                      TCGv arg2, int sign)
1879 {
1880     TCGv_i32 t0 = tcg_temp_new_i32();
1881     TCGv_i32 t1 = tcg_temp_new_i32();
1882 
1883     tcg_gen_trunc_tl_i32(t0, arg1);
1884     tcg_gen_trunc_tl_i32(t1, arg2);
1885     if (sign) {
1886         TCGv_i32 t2 = tcg_temp_new_i32();
1887         TCGv_i32 t3 = tcg_temp_new_i32();
1888         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1889         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1890         tcg_gen_and_i32(t2, t2, t3);
1891         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1892         tcg_gen_or_i32(t2, t2, t3);
1893         tcg_gen_movi_i32(t3, 0);
1894         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1895         tcg_gen_rem_i32(t3, t0, t1);
1896         tcg_gen_ext_i32_tl(ret, t3);
1897         tcg_temp_free_i32(t2);
1898         tcg_temp_free_i32(t3);
1899     } else {
1900         TCGv_i32 t2 = tcg_const_i32(1);
1901         TCGv_i32 t3 = tcg_const_i32(0);
1902         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1903         tcg_gen_remu_i32(t3, t0, t1);
1904         tcg_gen_extu_i32_tl(ret, t3);
1905         tcg_temp_free_i32(t2);
1906         tcg_temp_free_i32(t3);
1907     }
1908     tcg_temp_free_i32(t0);
1909     tcg_temp_free_i32(t1);
1910 }
1911 
1912 #define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
1913 static void glue(gen_, name)(DisasContext *ctx)                             \
1914 {                                                                           \
1915     gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1916                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1917                       sign);                                                \
1918 }
1919 
1920 GEN_INT_ARITH_MODW(moduw, 0x08, 0);
1921 GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1922 
1923 #if defined(TARGET_PPC64)
1924 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1925                                      TCGv arg2, int sign)
1926 {
1927     TCGv_i64 t0 = tcg_temp_new_i64();
1928     TCGv_i64 t1 = tcg_temp_new_i64();
1929 
1930     tcg_gen_mov_i64(t0, arg1);
1931     tcg_gen_mov_i64(t1, arg2);
1932     if (sign) {
1933         TCGv_i64 t2 = tcg_temp_new_i64();
1934         TCGv_i64 t3 = tcg_temp_new_i64();
1935         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1936         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1937         tcg_gen_and_i64(t2, t2, t3);
1938         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1939         tcg_gen_or_i64(t2, t2, t3);
1940         tcg_gen_movi_i64(t3, 0);
1941         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1942         tcg_gen_rem_i64(ret, t0, t1);
1943         tcg_temp_free_i64(t2);
1944         tcg_temp_free_i64(t3);
1945     } else {
1946         TCGv_i64 t2 = tcg_const_i64(1);
1947         TCGv_i64 t3 = tcg_const_i64(0);
1948         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1949         tcg_gen_remu_i64(ret, t0, t1);
1950         tcg_temp_free_i64(t2);
1951         tcg_temp_free_i64(t3);
1952     }
1953     tcg_temp_free_i64(t0);
1954     tcg_temp_free_i64(t1);
1955 }
1956 
1957 #define GEN_INT_ARITH_MODD(name, opc3, sign)                                \
1958 static void glue(gen_, name)(DisasContext *ctx)                             \
1959 {                                                                           \
1960     gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1961                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1962                       sign);                                                \
1963 }
1964 
1965 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1966 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1967 #endif
1968 
1969 /* mulhw  mulhw. */
1970 static void gen_mulhw(DisasContext *ctx)
1971 {
1972     TCGv_i32 t0 = tcg_temp_new_i32();
1973     TCGv_i32 t1 = tcg_temp_new_i32();
1974 
1975     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1976     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1977     tcg_gen_muls2_i32(t0, t1, t0, t1);
1978     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1979     tcg_temp_free_i32(t0);
1980     tcg_temp_free_i32(t1);
1981     if (unlikely(Rc(ctx->opcode) != 0)) {
1982         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1983     }
1984 }
1985 
1986 /* mulhwu  mulhwu.  */
1987 static void gen_mulhwu(DisasContext *ctx)
1988 {
1989     TCGv_i32 t0 = tcg_temp_new_i32();
1990     TCGv_i32 t1 = tcg_temp_new_i32();
1991 
1992     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1993     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1994     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1995     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1996     tcg_temp_free_i32(t0);
1997     tcg_temp_free_i32(t1);
1998     if (unlikely(Rc(ctx->opcode) != 0)) {
1999         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2000     }
2001 }
2002 
2003 /* mullw  mullw. */
2004 static void gen_mullw(DisasContext *ctx)
2005 {
2006 #if defined(TARGET_PPC64)
2007     TCGv_i64 t0, t1;
2008     t0 = tcg_temp_new_i64();
2009     t1 = tcg_temp_new_i64();
2010     tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
2011     tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
2012     tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2013     tcg_temp_free(t0);
2014     tcg_temp_free(t1);
2015 #else
2016     tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2017                     cpu_gpr[rB(ctx->opcode)]);
2018 #endif
2019     if (unlikely(Rc(ctx->opcode) != 0)) {
2020         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2021     }
2022 }
2023 
2024 /* mullwo  mullwo. */
2025 static void gen_mullwo(DisasContext *ctx)
2026 {
2027     TCGv_i32 t0 = tcg_temp_new_i32();
2028     TCGv_i32 t1 = tcg_temp_new_i32();
2029 
2030     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
2031     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
2032     tcg_gen_muls2_i32(t0, t1, t0, t1);
2033 #if defined(TARGET_PPC64)
2034     tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
2035 #else
2036     tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
2037 #endif
2038 
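    /*
     * OV is set iff the high half of the product is not simply the
     * sign-extension of the low half (compare t1 with t0's sign bit
     * smeared across the word).
     */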
2039     tcg_gen_sari_i32(t0, t0, 31);
2040     tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
2041     tcg_gen_extu_i32_tl(cpu_ov, t0);
2042     if (is_isa300(ctx)) {
2043         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2044     }
2045     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2046 
2047     tcg_temp_free_i32(t0);
2048     tcg_temp_free_i32(t1);
2049     if (unlikely(Rc(ctx->opcode) != 0)) {
2050         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2051     }
2052 }
2053 
2054 /* mulli */
2055 static void gen_mulli(DisasContext *ctx)
2056 {
2057     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2058                     SIMM(ctx->opcode));
2059 }
2060 
2061 #if defined(TARGET_PPC64)
2062 /* mulhd  mulhd. */
2063 static void gen_mulhd(DisasContext *ctx)
2064 {
2065     TCGv lo = tcg_temp_new();
2066     tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2067                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2068     tcg_temp_free(lo);
2069     if (unlikely(Rc(ctx->opcode) != 0)) {
2070         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2071     }
2072 }
2073 
2074 /* mulhdu  mulhdu. */
2075 static void gen_mulhdu(DisasContext *ctx)
2076 {
2077     TCGv lo = tcg_temp_new();
2078     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2079                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2080     tcg_temp_free(lo);
2081     if (unlikely(Rc(ctx->opcode) != 0)) {
2082         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2083     }
2084 }
2085 
2086 /* mulld  mulld. */
2087 static void gen_mulld(DisasContext *ctx)
2088 {
2089     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2090                    cpu_gpr[rB(ctx->opcode)]);
2091     if (unlikely(Rc(ctx->opcode) != 0)) {
2092         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2093     }
2094 }
2095 
2096 /* mulldo  mulldo. */
2097 static void gen_mulldo(DisasContext *ctx)
2098 {
2099     TCGv_i64 t0 = tcg_temp_new_i64();
2100     TCGv_i64 t1 = tcg_temp_new_i64();
2101 
2102     tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
2103                       cpu_gpr[rB(ctx->opcode)]);
2104     tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);
2105 
2106     tcg_gen_sari_i64(t0, t0, 63);
2107     tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
2108     if (is_isa300(ctx)) {
2109         tcg_gen_mov_tl(cpu_ov32, cpu_ov);
2110     }
2111     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
2112 
2113     tcg_temp_free_i64(t0);
2114     tcg_temp_free_i64(t1);
2115 
2116     if (unlikely(Rc(ctx->opcode) != 0)) {
2117         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2118     }
2119 }
2120 #endif
2121 
2122 /* Common subf function */
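/*
 * Computes ret = arg2 - arg1 [+ CA - 1 for the extended forms], following
 * the PowerPC definition of subtraction as ~arg1 + arg2 + 1 (or + CA).
 * XER[CA] is the carry out of that addition, which for the plain subtract
 * reduces to the unsigned comparison arg2 >= arg1.
 */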
2123 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
2124                                      TCGv arg2, bool add_ca, bool compute_ca,
2125                                      bool compute_ov, bool compute_rc0)
2126 {
2127     TCGv t0 = ret;
2128 
2129     if (compute_ca || compute_ov) {
2130         t0 = tcg_temp_new();
2131     }
2132 
2133     if (compute_ca) {
2134         /* dest = ~arg1 + arg2 [+ ca].  */
2135         if (NARROW_MODE(ctx)) {
2136             /*
2137              * Caution: a non-obvious corner case of the spec is that
2138              * we must produce the *entire* 64-bit addition, but
2139              * report the carry into bit 32.
2140              */
2141             TCGv inv1 = tcg_temp_new();
2142             TCGv t1 = tcg_temp_new();
2143             tcg_gen_not_tl(inv1, arg1);
2144             if (add_ca) {
2145                 tcg_gen_add_tl(t0, arg2, cpu_ca);
2146             } else {
2147                 tcg_gen_addi_tl(t0, arg2, 1);
2148             }
2149             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
2150             tcg_gen_add_tl(t0, t0, inv1);
2151             tcg_temp_free(inv1);
2152             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
2153             tcg_temp_free(t1);
2154             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
2155             if (is_isa300(ctx)) {
2156                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2157             }
2158         } else if (add_ca) {
2159             TCGv zero, inv1 = tcg_temp_new();
2160             tcg_gen_not_tl(inv1, arg1);
2161             zero = tcg_const_tl(0);
2162             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
2163             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
2164             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
2165             tcg_temp_free(zero);
2166             tcg_temp_free(inv1);
2167         } else {
2168             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
2169             tcg_gen_sub_tl(t0, arg2, arg1);
2170             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
2171         }
2172     } else if (add_ca) {
2173         /*
2174          * Since we're ignoring carry-out, we can simplify the
2175          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
2176          */
2177         tcg_gen_sub_tl(t0, arg2, arg1);
2178         tcg_gen_add_tl(t0, t0, cpu_ca);
2179         tcg_gen_subi_tl(t0, t0, 1);
2180     } else {
2181         tcg_gen_sub_tl(t0, arg2, arg1);
2182     }
2183 
2184     if (compute_ov) {
2185         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
2186     }
2187     if (unlikely(compute_rc0)) {
2188         gen_set_Rc0(ctx, t0);
2189     }
2190 
2191     if (t0 != ret) {
2192         tcg_gen_mov_tl(ret, t0);
2193         tcg_temp_free(t0);
2194     }
2195 }
2196 /* Sub functions with two operands */
2197 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
2198 static void glue(gen_, name)(DisasContext *ctx)                               \
2199 {                                                                             \
2200     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2201                       cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
2202                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2203 }
2204 /* Sub functions with one operand and one immediate */
2205 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
2206                                 add_ca, compute_ca, compute_ov)               \
2207 static void glue(gen_, name)(DisasContext *ctx)                               \
2208 {                                                                             \
2209     TCGv t0 = tcg_const_tl(const_val);                                        \
2210     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
2211                       cpu_gpr[rA(ctx->opcode)], t0,                           \
2212                       add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
2213     tcg_temp_free(t0);                                                        \
2214 }
2215 /* subf  subf.  subfo  subfo. */
2216 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
2217 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
2218 /* subfc  subfc.  subfco  subfco. */
2219 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
2220 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
2221 /* subfe  subfe.  subfeo  subfeo. */
2222 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
2223 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
2224 /* subfme  subfme.  subfmeo  subfmeo.  */
2225 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
2226 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
2227 /* subfze  subfze.  subfzeo  subfzeo.*/
2228 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
2229 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2230 
2231 /* subfic */
2232 static void gen_subfic(DisasContext *ctx)
2233 {
2234     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2235     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2236                       c, 0, 1, 0, 0);
2237     tcg_temp_free(c);
2238 }
2239 
2240 /* neg neg. nego nego. */
2241 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
2242 {
2243     TCGv zero = tcg_const_tl(0);
2244     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2245                       zero, 0, 0, compute_ov, Rc(ctx->opcode));
2246     tcg_temp_free(zero);
2247 }
2248 
2249 static void gen_neg(DisasContext *ctx)
2250 {
2251     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2252     if (unlikely(Rc(ctx->opcode))) {
2253         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2254     }
2255 }
2256 
2257 static void gen_nego(DisasContext *ctx)
2258 {
2259     gen_op_arith_neg(ctx, 1);
2260 }
2261 
2262 /***                            Integer logical                            ***/
2263 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2264 static void glue(gen_, name)(DisasContext *ctx)                               \
2265 {                                                                             \
2266     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2267        cpu_gpr[rB(ctx->opcode)]);                                             \
2268     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2269         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2270 }
2271 
2272 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2273 static void glue(gen_, name)(DisasContext *ctx)                               \
2274 {                                                                             \
2275     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2276     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2277         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2278 }
2279 
2280 /* and & and. */
2281 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2282 /* andc & andc. */
2283 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2284 
2285 /* andi. */
2286 static void gen_andi_(DisasContext *ctx)
2287 {
2288     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2289                     UIMM(ctx->opcode));
2290     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2291 }
2292 
2293 /* andis. */
2294 static void gen_andis_(DisasContext *ctx)
2295 {
2296     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2297                     UIMM(ctx->opcode) << 16);
2298     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2299 }
2300 
2301 /* cntlzw */
2302 static void gen_cntlzw(DisasContext *ctx)
2303 {
2304     TCGv_i32 t = tcg_temp_new_i32();
2305 
2306     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2307     tcg_gen_clzi_i32(t, t, 32);
2308     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2309     tcg_temp_free_i32(t);
2310 
2311     if (unlikely(Rc(ctx->opcode) != 0)) {
2312         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2313     }
2314 }
2315 
2316 /* cnttzw */
2317 static void gen_cnttzw(DisasContext *ctx)
2318 {
2319     TCGv_i32 t = tcg_temp_new_i32();
2320 
2321     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2322     tcg_gen_ctzi_i32(t, t, 32);
2323     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2324     tcg_temp_free_i32(t);
2325 
2326     if (unlikely(Rc(ctx->opcode) != 0)) {
2327         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2328     }
2329 }
2330 
2331 /* eqv & eqv. */
2332 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
2333 /* extsb & extsb. */
2334 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
2335 /* extsh & extsh. */
2336 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
2337 /* nand & nand. */
2338 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
2339 /* nor & nor. */
2340 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2341 
2342 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2343 static void gen_pause(DisasContext *ctx)
2344 {
2345     TCGv_i32 t0 = tcg_const_i32(0);
2346     tcg_gen_st_i32(t0, cpu_env,
2347                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2348     tcg_temp_free_i32(t0);
2349 
2350     /* Stop translation; this gives other CPUs a chance to run */
2351     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2352 }
2353 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2354 
2355 /* or & or. */
2356 static void gen_or(DisasContext *ctx)
2357 {
2358     int rs, ra, rb;
2359 
2360     rs = rS(ctx->opcode);
2361     ra = rA(ctx->opcode);
2362     rb = rB(ctx->opcode);
2363     /* Optimisation for the mr rA,rS (register move) case */
2364     if (rs != ra || rs != rb) {
2365         if (rs != rb) {
2366             tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
2367         } else {
2368             tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
2369         }
2370         if (unlikely(Rc(ctx->opcode) != 0)) {
2371             gen_set_Rc0(ctx, cpu_gpr[ra]);
2372         }
2373     } else if (unlikely(Rc(ctx->opcode) != 0)) {
2374         gen_set_Rc0(ctx, cpu_gpr[rs]);
2375 #if defined(TARGET_PPC64)
2376     } else if (rs != 0) { /* 0 is nop */
2377         int prio = 0;
2378 
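        /*
         * "or rx,rx,rx" with rx != 0 is a program-priority hint: the
         * value chosen below is written to the PPR priority field (the
         * 0x001C000000000000 bits) once the privilege checks pass.
         */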
2379         switch (rs) {
2380         case 1:
2381             /* Set process priority to low */
2382             prio = 2;
2383             break;
2384         case 6:
2385             /* Set process priority to medium-low */
2386             prio = 3;
2387             break;
2388         case 2:
2389             /* Set process priority to normal */
2390             prio = 4;
2391             break;
2392 #if !defined(CONFIG_USER_ONLY)
2393         case 31:
2394             if (!ctx->pr) {
2395                 /* Set process priority to very low */
2396                 prio = 1;
2397             }
2398             break;
2399         case 5:
2400             if (!ctx->pr) {
2401                 /* Set process priority to medium-high */
2402                 prio = 5;
2403             }
2404             break;
2405         case 3:
2406             if (!ctx->pr) {
2407                 /* Set process priority to high */
2408                 prio = 6;
2409             }
2410             break;
2411         case 7:
2412             if (ctx->hv && !ctx->pr) {
2413                 /* Set process priority to very high */
2414                 prio = 7;
2415             }
2416             break;
2417 #endif
2418         default:
2419             break;
2420         }
2421         if (prio) {
2422             TCGv t0 = tcg_temp_new();
2423             gen_load_spr(t0, SPR_PPR);
2424             tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
2425             tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
2426             gen_store_spr(SPR_PPR, t0);
2427             tcg_temp_free(t0);
2428         }
2429 #if !defined(CONFIG_USER_ONLY)
2430         /*
2431          * Pause out of TCG; otherwise spin loops with smt_low eat too
2432          * much CPU and the kernel hangs.  This applies to all
2433          * encodings other than no-op, e.g., miso(rs=26), yield(27),
2434          * mdoio(29), mdoom(30), and all currently undefined.
2435          */
2436         gen_pause(ctx);
2437 #endif
2438 #endif
2439     }
2440 }
2441 /* orc & orc. */
2442 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2443 
2444 /* xor & xor. */
2445 static void gen_xor(DisasContext *ctx)
2446 {
2447     /* Optimisation for "set to zero" case */
2448     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2449         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2450                        cpu_gpr[rB(ctx->opcode)]);
2451     } else {
2452         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2453     }
2454     if (unlikely(Rc(ctx->opcode) != 0)) {
2455         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2456     }
2457 }
2458 
2459 /* ori */
2460 static void gen_ori(DisasContext *ctx)
2461 {
2462     target_ulong uimm = UIMM(ctx->opcode);
2463 
2464     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2465         return;
2466     }
2467     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2468 }
2469 
2470 /* oris */
2471 static void gen_oris(DisasContext *ctx)
2472 {
2473     target_ulong uimm = UIMM(ctx->opcode);
2474 
2475     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2476         /* NOP */
2477         return;
2478     }
2479     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2480                    uimm << 16);
2481 }
2482 
2483 /* xori */
2484 static void gen_xori(DisasContext *ctx)
2485 {
2486     target_ulong uimm = UIMM(ctx->opcode);
2487 
2488     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2489         /* NOP */
2490         return;
2491     }
2492     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2493 }
2494 
2495 /* xoris */
2496 static void gen_xoris(DisasContext *ctx)
2497 {
2498     target_ulong uimm = UIMM(ctx->opcode);
2499 
2500     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2501         /* NOP */
2502         return;
2503     }
2504     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2505                     uimm << 16);
2506 }
2507 
2508 /* popcntb : PowerPC 2.03 specification */
2509 static void gen_popcntb(DisasContext *ctx)
2510 {
2511     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2512 }
2513 
2514 static void gen_popcntw(DisasContext *ctx)
2515 {
2516 #if defined(TARGET_PPC64)
2517     gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2518 #else
2519     tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2520 #endif
2521 }
2522 
2523 #if defined(TARGET_PPC64)
2524 /* popcntd: PowerPC 2.06 specification */
2525 static void gen_popcntd(DisasContext *ctx)
2526 {
2527     tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2528 }
2529 #endif
2530 
2531 /* prtyw: PowerPC 2.05 specification */
2532 static void gen_prtyw(DisasContext *ctx)
2533 {
2534     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2535     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2536     TCGv t0 = tcg_temp_new();
2537     tcg_gen_shri_tl(t0, rs, 16);
2538     tcg_gen_xor_tl(ra, rs, t0);
2539     tcg_gen_shri_tl(t0, ra, 8);
2540     tcg_gen_xor_tl(ra, ra, t0);
2541     tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
2542     tcg_temp_free(t0);
2543 }
2544 
2545 #if defined(TARGET_PPC64)
2546 /* prtyd: PowerPC 2.05 specification */
2547 static void gen_prtyd(DisasContext *ctx)
2548 {
2549     TCGv ra = cpu_gpr[rA(ctx->opcode)];
2550     TCGv rs = cpu_gpr[rS(ctx->opcode)];
2551     TCGv t0 = tcg_temp_new();
2552     tcg_gen_shri_tl(t0, rs, 32);
2553     tcg_gen_xor_tl(ra, rs, t0);
2554     tcg_gen_shri_tl(t0, ra, 16);
2555     tcg_gen_xor_tl(ra, ra, t0);
2556     tcg_gen_shri_tl(t0, ra, 8);
2557     tcg_gen_xor_tl(ra, ra, t0);
2558     tcg_gen_andi_tl(ra, ra, 1);
2559     tcg_temp_free(t0);
2560 }
2561 #endif
2562 
2563 #if defined(TARGET_PPC64)
2564 /* bpermd */
2565 static void gen_bpermd(DisasContext *ctx)
2566 {
2567     gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
2568                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2569 }
2570 #endif
2571 
2572 #if defined(TARGET_PPC64)
2573 /* extsw & extsw. */
2574 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2575 
2576 /* cntlzd */
2577 static void gen_cntlzd(DisasContext *ctx)
2578 {
2579     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2580     if (unlikely(Rc(ctx->opcode) != 0)) {
2581         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2582     }
2583 }
2584 
2585 /* cnttzd */
2586 static void gen_cnttzd(DisasContext *ctx)
2587 {
2588     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2589     if (unlikely(Rc(ctx->opcode) != 0)) {
2590         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2591     }
2592 }
2593 
2594 /* darn */
2595 static void gen_darn(DisasContext *ctx)
2596 {
2597     int l = L(ctx->opcode);
2598 
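    /*
     * L selects the format: 0 is the 32-bit conditioned random number,
     * 1 and 2 the 64-bit conditioned and raw forms.  Reserved L values
     * simply return -1, which is also darn's architected error value.
     */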
2599     if (l > 2) {
2600         tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
2601     } else {
2602         gen_icount_io_start(ctx);
2603         if (l == 0) {
2604             gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
2605         } else {
2606             /* Return 64-bit random for both CRN and RRN */
2607             gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
2608         }
2609     }
2610 }
2611 #endif
2612 
2613 /***                             Integer rotate                            ***/
2614 
2615 /* rlwimi & rlwimi. */
2616 static void gen_rlwimi(DisasContext *ctx)
2617 {
2618     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2619     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2620     uint32_t sh = SH(ctx->opcode);
2621     uint32_t mb = MB(ctx->opcode);
2622     uint32_t me = ME(ctx->opcode);
2623 
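    /*
     * rlwimi rotates rS left by sh and inserts the bits selected by the
     * MB..ME mask into rA.  When the rotation lines up with a contiguous
     * mask (sh == 31 - me and mb <= me) this is exactly a deposit of the
     * low me - mb + 1 bits of rS at bit sh; otherwise fall back to
     * rotate, mask and merge.
     */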
2624     if (sh == (31 - me) && mb <= me) {
2625         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2626     } else {
2627         target_ulong mask;
2628         bool mask_in_32b = true;
2629         TCGv t1;
2630 
2631 #if defined(TARGET_PPC64)
2632         mb += 32;
2633         me += 32;
2634 #endif
2635         mask = MASK(mb, me);
2636 
2637 #if defined(TARGET_PPC64)
2638         if (mask > 0xffffffffu) {
2639             mask_in_32b = false;
2640         }
2641 #endif
2642         t1 = tcg_temp_new();
2643         if (mask_in_32b) {
2644             TCGv_i32 t0 = tcg_temp_new_i32();
2645             tcg_gen_trunc_tl_i32(t0, t_rs);
2646             tcg_gen_rotli_i32(t0, t0, sh);
2647             tcg_gen_extu_i32_tl(t1, t0);
2648             tcg_temp_free_i32(t0);
2649         } else {
2650 #if defined(TARGET_PPC64)
2651             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2652             tcg_gen_rotli_i64(t1, t1, sh);
2653 #else
2654             g_assert_not_reached();
2655 #endif
2656         }
2657 
2658         tcg_gen_andi_tl(t1, t1, mask);
2659         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2660         tcg_gen_or_tl(t_ra, t_ra, t1);
2661         tcg_temp_free(t1);
2662     }
2663     if (unlikely(Rc(ctx->opcode) != 0)) {
2664         gen_set_Rc0(ctx, t_ra);
2665     }
2666 }
2667 
2668 /* rlwinm & rlwinm. */
2669 static void gen_rlwinm(DisasContext *ctx)
2670 {
2671     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2672     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2673     int sh = SH(ctx->opcode);
2674     int mb = MB(ctx->opcode);
2675     int me = ME(ctx->opcode);
2676     int len = me - mb + 1;
2677     int rsh = (32 - sh) & 31;
2678 
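    /*
     * Fast paths first: a rotate whose mask ends where the shifted bits
     * land (me == 31 - sh) is a deposit of the low len bits of rS into a
     * zeroed destination, and a right-justified field (me == 31, fitting
     * in the low word) is a plain bitfield extract.  Everything else
     * takes the generic rotate-and-mask path.
     */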
2679     if (sh != 0 && len > 0 && me == (31 - sh)) {
2680         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2681     } else if (me == 31 && rsh + len <= 32) {
2682         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2683     } else {
2684         target_ulong mask;
2685         bool mask_in_32b = true;
2686 #if defined(TARGET_PPC64)
2687         mb += 32;
2688         me += 32;
2689 #endif
2690         mask = MASK(mb, me);
2691 #if defined(TARGET_PPC64)
2692         if (mask > 0xffffffffu) {
2693             mask_in_32b = false;
2694         }
2695 #endif
2696         if (mask_in_32b) {
2697             if (sh == 0) {
2698                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2699             } else {
2700                 TCGv_i32 t0 = tcg_temp_new_i32();
2701                 tcg_gen_trunc_tl_i32(t0, t_rs);
2702                 tcg_gen_rotli_i32(t0, t0, sh);
2703                 tcg_gen_andi_i32(t0, t0, mask);
2704                 tcg_gen_extu_i32_tl(t_ra, t0);
2705                 tcg_temp_free_i32(t0);
2706             }
2707         } else {
2708 #if defined(TARGET_PPC64)
2709             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2710             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2711             tcg_gen_andi_i64(t_ra, t_ra, mask);
2712 #else
2713             g_assert_not_reached();
2714 #endif
2715         }
2716     }
2717     if (unlikely(Rc(ctx->opcode) != 0)) {
2718         gen_set_Rc0(ctx, t_ra);
2719     }
2720 }
2721 
2722 /* rlwnm & rlwnm. */
2723 static void gen_rlwnm(DisasContext *ctx)
2724 {
2725     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2726     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2727     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2728     uint32_t mb = MB(ctx->opcode);
2729     uint32_t me = ME(ctx->opcode);
2730     target_ulong mask;
2731     bool mask_in_32b = true;
2732 
2733 #if defined(TARGET_PPC64)
2734     mb += 32;
2735     me += 32;
2736 #endif
2737     mask = MASK(mb, me);
2738 
2739 #if defined(TARGET_PPC64)
2740     if (mask > 0xffffffffu) {
2741         mask_in_32b = false;
2742     }
2743 #endif
2744     if (mask_in_32b) {
2745         TCGv_i32 t0 = tcg_temp_new_i32();
2746         TCGv_i32 t1 = tcg_temp_new_i32();
2747         tcg_gen_trunc_tl_i32(t0, t_rb);
2748         tcg_gen_trunc_tl_i32(t1, t_rs);
2749         tcg_gen_andi_i32(t0, t0, 0x1f);
2750         tcg_gen_rotl_i32(t1, t1, t0);
2751         tcg_gen_extu_i32_tl(t_ra, t1);
2752         tcg_temp_free_i32(t0);
2753         tcg_temp_free_i32(t1);
2754     } else {
2755 #if defined(TARGET_PPC64)
2756         TCGv_i64 t0 = tcg_temp_new_i64();
2757         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2758         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2759         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2760         tcg_temp_free_i64(t0);
2761 #else
2762         g_assert_not_reached();
2763 #endif
2764     }
2765 
2766     tcg_gen_andi_tl(t_ra, t_ra, mask);
2767 
2768     if (unlikely(Rc(ctx->opcode) != 0)) {
2769         gen_set_Rc0(ctx, t_ra);
2770     }
2771 }
2772 
2773 #if defined(TARGET_PPC64)
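/*
 * For the 64-bit rotates, the 6-bit SH and MB/ME fields are split: SH()
 * and MB() only recover the low five bits, while the high bit of each is
 * decoded as part of the opcode.  These macros stamp out the two or four
 * handler variants needed, passing the fixed high bit(s) down explicitly.
 */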
2774 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2775 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2776 {                                                                             \
2777     gen_##name(ctx, 0);                                                       \
2778 }                                                                             \
2779                                                                               \
2780 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2781 {                                                                             \
2782     gen_##name(ctx, 1);                                                       \
2783 }
2784 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2785 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2786 {                                                                             \
2787     gen_##name(ctx, 0, 0);                                                    \
2788 }                                                                             \
2789                                                                               \
2790 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2791 {                                                                             \
2792     gen_##name(ctx, 0, 1);                                                    \
2793 }                                                                             \
2794                                                                               \
2795 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2796 {                                                                             \
2797     gen_##name(ctx, 1, 0);                                                    \
2798 }                                                                             \
2799                                                                               \
2800 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2801 {                                                                             \
2802     gen_##name(ctx, 1, 1);                                                    \
2803 }
2804 
2805 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2806 {
2807     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2808     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2809     int len = me - mb + 1;
2810     int rsh = (64 - sh) & 63;
2811 
2812     if (sh != 0 && len > 0 && me == (63 - sh)) {
2813         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2814     } else if (me == 63 && rsh + len <= 64) {
2815         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2816     } else {
2817         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2818         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2819     }
2820     if (unlikely(Rc(ctx->opcode) != 0)) {
2821         gen_set_Rc0(ctx, t_ra);
2822     }
2823 }
2824 
2825 /* rldicl - rldicl. */
2826 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2827 {
2828     uint32_t sh, mb;
2829 
2830     sh = SH(ctx->opcode) | (shn << 5);
2831     mb = MB(ctx->opcode) | (mbn << 5);
2832     gen_rldinm(ctx, mb, 63, sh);
2833 }
2834 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2835 
2836 /* rldicr - rldicr. */
2837 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2838 {
2839     uint32_t sh, me;
2840 
2841     sh = SH(ctx->opcode) | (shn << 5);
2842     me = MB(ctx->opcode) | (men << 5);
2843     gen_rldinm(ctx, 0, me, sh);
2844 }
2845 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2846 
2847 /* rldic - rldic. */
2848 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2849 {
2850     uint32_t sh, mb;
2851 
2852     sh = SH(ctx->opcode) | (shn << 5);
2853     mb = MB(ctx->opcode) | (mbn << 5);
2854     gen_rldinm(ctx, mb, 63 - sh, sh);
2855 }
2856 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2857 
2858 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2859 {
2860     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2861     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2862     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2863     TCGv t0;
2864 
2865     t0 = tcg_temp_new();
2866     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2867     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2868     tcg_temp_free(t0);
2869 
2870     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2871     if (unlikely(Rc(ctx->opcode) != 0)) {
2872         gen_set_Rc0(ctx, t_ra);
2873     }
2874 }
2875 
2876 /* rldcl - rldcl. */
2877 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2878 {
2879     uint32_t mb;
2880 
2881     mb = MB(ctx->opcode) | (mbn << 5);
2882     gen_rldnm(ctx, mb, 63);
2883 }
2884 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2885 
2886 /* rldcr - rldcr. */
2887 static inline void gen_rldcr(DisasContext *ctx, int men)
2888 {
2889     uint32_t me;
2890 
2891     me = MB(ctx->opcode) | (men << 5);
2892     gen_rldnm(ctx, 0, me);
2893 }
2894 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2895 
2896 /* rldimi - rldimi. */
2897 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2898 {
2899     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2900     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2901     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2902     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2903     uint32_t me = 63 - sh;
2904 
2905     if (mb <= me) {
2906         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2907     } else {
2908         target_ulong mask = MASK(mb, me);
2909         TCGv t1 = tcg_temp_new();
2910 
2911         tcg_gen_rotli_tl(t1, t_rs, sh);
2912         tcg_gen_andi_tl(t1, t1, mask);
2913         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2914         tcg_gen_or_tl(t_ra, t_ra, t1);
2915         tcg_temp_free(t1);
2916     }
2917     if (unlikely(Rc(ctx->opcode) != 0)) {
2918         gen_set_Rc0(ctx, t_ra);
2919     }
2920 }
2921 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2922 #endif
2923 
2924 /***                             Integer shift                             ***/
2925 
2926 /* slw & slw. */
2927 static void gen_slw(DisasContext *ctx)
2928 {
2929     TCGv t0, t1;
2930 
2931     t0 = tcg_temp_new();
2932     /* AND rS with a mask that is 0 when rB >= 0x20 */
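    /*
     * Done branchlessly: shift rB left so that its bit 5 (the "shift
     * amount >= 32" bit) becomes the sign bit, then arithmetic-shift
     * right to smear it across the word.  srw, sld and srd below use
     * the same trick.
     */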
2933 #if defined(TARGET_PPC64)
2934     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2935     tcg_gen_sari_tl(t0, t0, 0x3f);
2936 #else
2937     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2938     tcg_gen_sari_tl(t0, t0, 0x1f);
2939 #endif
2940     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2941     t1 = tcg_temp_new();
2942     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2943     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2944     tcg_temp_free(t1);
2945     tcg_temp_free(t0);
2946     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2947     if (unlikely(Rc(ctx->opcode) != 0)) {
2948         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2949     }
2950 }
2951 
2952 /* sraw & sraw. */
2953 static void gen_sraw(DisasContext *ctx)
2954 {
2955     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2956                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2957     if (unlikely(Rc(ctx->opcode) != 0)) {
2958         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2959     }
2960 }
2961 
2962 /* srawi & srawi. */
2963 static void gen_srawi(DisasContext *ctx)
2964 {
2965     int sh = SH(ctx->opcode);
2966     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2967     TCGv src = cpu_gpr[rS(ctx->opcode)];
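    /*
     * XER[CA] is set only when the source is negative and non-zero bits
     * are shifted out, i.e. the arithmetic shift is inexact; it is
     * computed below as (sign-extended src & ((1 << sh) - 1)) != 0,
     * masked by the sign.
     */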
2968     if (sh == 0) {
2969         tcg_gen_ext32s_tl(dst, src);
2970         tcg_gen_movi_tl(cpu_ca, 0);
2971         if (is_isa300(ctx)) {
2972             tcg_gen_movi_tl(cpu_ca32, 0);
2973         }
2974     } else {
2975         TCGv t0;
2976         tcg_gen_ext32s_tl(dst, src);
2977         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2978         t0 = tcg_temp_new();
2979         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2980         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2981         tcg_temp_free(t0);
2982         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2983         if (is_isa300(ctx)) {
2984             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2985         }
2986         tcg_gen_sari_tl(dst, dst, sh);
2987     }
2988     if (unlikely(Rc(ctx->opcode) != 0)) {
2989         gen_set_Rc0(ctx, dst);
2990     }
2991 }
2992 
2993 /* srw & srw. */
2994 static void gen_srw(DisasContext *ctx)
2995 {
2996     TCGv t0, t1;
2997 
2998     t0 = tcg_temp_new();
2999     /* AND rS with a mask that is 0 when rB >= 0x20 */
3000 #if defined(TARGET_PPC64)
3001     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
3002     tcg_gen_sari_tl(t0, t0, 0x3f);
3003 #else
3004     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
3005     tcg_gen_sari_tl(t0, t0, 0x1f);
3006 #endif
3007     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3008     tcg_gen_ext32u_tl(t0, t0);
3009     t1 = tcg_temp_new();
3010     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
3011     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3012     tcg_temp_free(t1);
3013     tcg_temp_free(t0);
3014     if (unlikely(Rc(ctx->opcode) != 0)) {
3015         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3016     }
3017 }
3018 
3019 #if defined(TARGET_PPC64)
3020 /* sld & sld. */
3021 static void gen_sld(DisasContext *ctx)
3022 {
3023     TCGv t0, t1;
3024 
3025     t0 = tcg_temp_new();
3026     /* AND rS with a mask that is 0 when rB >= 0x40 */
3027     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3028     tcg_gen_sari_tl(t0, t0, 0x3f);
3029     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3030     t1 = tcg_temp_new();
3031     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3032     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3033     tcg_temp_free(t1);
3034     tcg_temp_free(t0);
3035     if (unlikely(Rc(ctx->opcode) != 0)) {
3036         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3037     }
3038 }
3039 
3040 /* srad & srad. */
3041 static void gen_srad(DisasContext *ctx)
3042 {
3043     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3044                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3045     if (unlikely(Rc(ctx->opcode) != 0)) {
3046         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3047     }
3048 }
3049 /* sradi & sradi. */
3050 static inline void gen_sradi(DisasContext *ctx, int n)
3051 {
3052     int sh = SH(ctx->opcode) + (n << 5);
3053     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3054     TCGv src = cpu_gpr[rS(ctx->opcode)];
3055     if (sh == 0) {
3056         tcg_gen_mov_tl(dst, src);
3057         tcg_gen_movi_tl(cpu_ca, 0);
3058         if (is_isa300(ctx)) {
3059             tcg_gen_movi_tl(cpu_ca32, 0);
3060         }
3061     } else {
3062         TCGv t0;
3063         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
3064         t0 = tcg_temp_new();
3065         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
3066         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
3067         tcg_temp_free(t0);
3068         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
3069         if (is_isa300(ctx)) {
3070             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
3071         }
3072         tcg_gen_sari_tl(dst, src, sh);
3073     }
3074     if (unlikely(Rc(ctx->opcode) != 0)) {
3075         gen_set_Rc0(ctx, dst);
3076     }
3077 }
3078 
3079 static void gen_sradi0(DisasContext *ctx)
3080 {
3081     gen_sradi(ctx, 0);
3082 }
3083 
3084 static void gen_sradi1(DisasContext *ctx)
3085 {
3086     gen_sradi(ctx, 1);
3087 }
3088 
3089 /* extswsli & extswsli. */
3090 static inline void gen_extswsli(DisasContext *ctx, int n)
3091 {
3092     int sh = SH(ctx->opcode) + (n << 5);
3093     TCGv dst = cpu_gpr[rA(ctx->opcode)];
3094     TCGv src = cpu_gpr[rS(ctx->opcode)];
3095 
3096     tcg_gen_ext32s_tl(dst, src);
3097     tcg_gen_shli_tl(dst, dst, sh);
3098     if (unlikely(Rc(ctx->opcode) != 0)) {
3099         gen_set_Rc0(ctx, dst);
3100     }
3101 }
3102 
3103 static void gen_extswsli0(DisasContext *ctx)
3104 {
3105     gen_extswsli(ctx, 0);
3106 }
3107 
3108 static void gen_extswsli1(DisasContext *ctx)
3109 {
3110     gen_extswsli(ctx, 1);
3111 }
3112 
3113 /* srd & srd. */
3114 static void gen_srd(DisasContext *ctx)
3115 {
3116     TCGv t0, t1;
3117 
3118     t0 = tcg_temp_new();
3119     /* AND rS with a mask that is 0 when rB >= 0x40 */
3120     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
3121     tcg_gen_sari_tl(t0, t0, 0x3f);
3122     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
3123     t1 = tcg_temp_new();
3124     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
3125     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
3126     tcg_temp_free(t1);
3127     tcg_temp_free(t0);
3128     if (unlikely(Rc(ctx->opcode) != 0)) {
3129         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3130     }
3131 }
3132 #endif
3133 
3134 /***                           Addressing modes                            ***/
3135 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
3136 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
3137                                       target_long maskl)
3138 {
3139     target_long simm = SIMM(ctx->opcode);
3140 
3141     simm &= ~maskl;
3142     if (rA(ctx->opcode) == 0) {
3143         if (NARROW_MODE(ctx)) {
3144             simm = (uint32_t)simm;
3145         }
3146         tcg_gen_movi_tl(EA, simm);
3147     } else if (likely(simm != 0)) {
3148         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
3149         if (NARROW_MODE(ctx)) {
3150             tcg_gen_ext32u_tl(EA, EA);
3151         }
3152     } else {
3153         if (NARROW_MODE(ctx)) {
3154             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3155         } else {
3156             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3157         }
3158     }
3159 }
3160 
3161 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3162 {
3163     if (rA(ctx->opcode) == 0) {
3164         if (NARROW_MODE(ctx)) {
3165             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3166         } else {
3167             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3168         }
3169     } else {
3170         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3171         if (NARROW_MODE(ctx)) {
3172             tcg_gen_ext32u_tl(EA, EA);
3173         }
3174     }
3175 }
3176 
3177 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3178 {
3179     if (rA(ctx->opcode) == 0) {
3180         tcg_gen_movi_tl(EA, 0);
3181     } else if (NARROW_MODE(ctx)) {
3182         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3183     } else {
3184         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3185     }
3186 }
3187 
3188 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3189                                 target_long val)
3190 {
3191     tcg_gen_addi_tl(ret, arg1, val);
3192     if (NARROW_MODE(ctx)) {
3193         tcg_gen_ext32u_tl(ret, ret);
3194     }
3195 }
3196 
3197 static inline void gen_align_no_le(DisasContext *ctx)
3198 {
3199     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
3200                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
3201 }
3202 
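/*
 * Return a new temporary holding EA = (ra|0) + displ, truncated to
 * 32 bits in narrow mode.
 */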
3203 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3204 {
3205     TCGv ea = tcg_temp_new();
3206     if (ra) {
3207         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3208     } else {
3209         tcg_gen_mov_tl(ea, displ);
3210     }
3211     if (NARROW_MODE(ctx)) {
3212         tcg_gen_ext32u_tl(ea, ea);
3213     }
3214     return ea;
3215 }
3216 
3217 /***                             Integer load                              ***/
3218 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
3219 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
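/*
 * DEF_MEMOP() applies the guest's current endianness to a MemOp, while
 * BSWAP_MEMOP() applies the opposite one, as required by the
 * byte-reverse accessors (lhbrx, lwbrx, sthbrx, ... below).
 */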
3220 
3221 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
3222 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
3223                                   TCGv val,                             \
3224                                   TCGv addr)                            \
3225 {                                                                       \
3226     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
3227 }
3228 
3229 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
3230 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
3231 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
3232 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
3233 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
3234 
3235 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
3236 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3237 
3238 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
3239 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
3240                                              TCGv_i64 val,          \
3241                                              TCGv addr)             \
3242 {                                                                   \
3243     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
3244 }
3245 
3246 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
3247 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
3248 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
3249 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
3250 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
3251 
3252 #if defined(TARGET_PPC64)
3253 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
3254 #endif
3255 
3256 #define GEN_QEMU_STORE_TL(stop, op)                                     \
3257 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
3258                                   TCGv val,                             \
3259                                   TCGv addr)                            \
3260 {                                                                       \
3261     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
3262 }
3263 
3264 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
3265 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
3266 #endif
3267 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
3268 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
3269 
3270 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
3271 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3272 
3273 #define GEN_QEMU_STORE_64(stop, op)                               \
3274 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
3275                                               TCGv_i64 val,       \
3276                                               TCGv addr)          \
3277 {                                                                 \
3278     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
3279 }
3280 
3281 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
3282 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
3283 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
3284 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
3285 
3286 #if defined(TARGET_PPC64)
3287 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
3288 #endif
3289 
3290 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
3291 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3292 {                                                                             \
3293     TCGv EA;                                                                  \
3294     chk(ctx);                                                                 \
3295     gen_set_access_type(ctx, ACCESS_INT);                                     \
3296     EA = tcg_temp_new();                                                      \
3297     gen_addr_reg_index(ctx, EA);                                              \
3298     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
3299     tcg_temp_free(EA);                                                        \
3300 }
3301 
3302 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
3303     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3304 
3305 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
3306     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3307 
3308 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
3309 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3310 {                                                                             \
3311     TCGv EA;                                                                  \
3312     CHK_SV(ctx);                                                              \
3313     gen_set_access_type(ctx, ACCESS_INT);                                     \
3314     EA = tcg_temp_new();                                                      \
3315     gen_addr_reg_index(ctx, EA);                                              \
3316     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
3317     tcg_temp_free(EA);                                                        \
3318 }
3319 
3320 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
3321 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
3322 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
3323 #if defined(TARGET_PPC64)
3324 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
3325 #endif
3326 
3327 #if defined(TARGET_PPC64)
3328 /* CI load/store variants */
3329 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
3330 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
3331 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
3332 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
3333 #endif
3334 
3335 /***                              Integer store                            ***/
3336 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
3337 static void glue(gen_, name##x)(DisasContext *ctx)                            \
3338 {                                                                             \
3339     TCGv EA;                                                                  \
3340     chk(ctx);                                                                 \
3341     gen_set_access_type(ctx, ACCESS_INT);                                     \
3342     EA = tcg_temp_new();                                                      \
3343     gen_addr_reg_index(ctx, EA);                                              \
3344     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
3345     tcg_temp_free(EA);                                                        \
3346 }
3347 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
3348     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
3349 
3350 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
3351     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3352 
3353 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3354 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3355 {                                                                             \
3356     TCGv EA;                                                                  \
3357     CHK_SV(ctx);                                                              \
3358     gen_set_access_type(ctx, ACCESS_INT);                                     \
3359     EA = tcg_temp_new();                                                      \
3360     gen_addr_reg_index(ctx, EA);                                              \
3361     tcg_gen_qemu_st_tl(                                                       \
3362         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3363     tcg_temp_free(EA);                                                        \
3364 }
3365 
3366 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3367 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3368 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3369 #if defined(TARGET_PPC64)
3370 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3371 #endif
3372 
3373 #if defined(TARGET_PPC64)
3374 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
3375 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
3376 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
3377 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
3378 #endif
3379 /***                Integer load and store with byte reverse               ***/
3380 
3381 /* lhbrx */
3382 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
3383 
3384 /* lwbrx */
3385 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
3386 
3387 #if defined(TARGET_PPC64)
3388 /* ldbrx */
3389 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
3390 /* stdbrx */
3391 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
3392 #endif  /* TARGET_PPC64 */
3393 
3394 /* sthbrx */
3395 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
3396 /* stwbrx */
3397 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3398 
3399 /***                    Integer load and store multiple                    ***/
3400 
3401 /* lmw */
3402 static void gen_lmw(DisasContext *ctx)
3403 {
3404     TCGv t0;
3405     TCGv_i32 t1;
3406 
3407     if (ctx->le_mode) {
3408         gen_align_no_le(ctx);
3409         return;
3410     }
3411     gen_set_access_type(ctx, ACCESS_INT);
3412     t0 = tcg_temp_new();
3413     t1 = tcg_const_i32(rD(ctx->opcode));
3414     gen_addr_imm_index(ctx, t0, 0);
3415     gen_helper_lmw(cpu_env, t0, t1);
3416     tcg_temp_free(t0);
3417     tcg_temp_free_i32(t1);
3418 }
3419 
3420 /* stmw */
3421 static void gen_stmw(DisasContext *ctx)
3422 {
3423     TCGv t0;
3424     TCGv_i32 t1;
3425 
3426     if (ctx->le_mode) {
3427         gen_align_no_le(ctx);
3428         return;
3429     }
3430     gen_set_access_type(ctx, ACCESS_INT);
3431     t0 = tcg_temp_new();
3432     t1 = tcg_const_i32(rS(ctx->opcode));
3433     gen_addr_imm_index(ctx, t0, 0);
3434     gen_helper_stmw(cpu_env, t0, t1);
3435     tcg_temp_free(t0);
3436     tcg_temp_free_i32(t1);
3437 }
3438 
3439 /***                    Integer load and store strings                     ***/
3440 
3441 /* lswi */
3442 /*
3443  * The PowerPC32 specification says we must generate an exception if rA is
3444  * in the range of registers to be loaded.  On the other hand, IBM says
3445  * this is valid, but rA won't be loaded.  For now, I'll follow the
3446  * spec...
3447  */
3448 static void gen_lswi(DisasContext *ctx)
3449 {
3450     TCGv t0;
3451     TCGv_i32 t1, t2;
3452     int nb = NB(ctx->opcode);
3453     int start = rD(ctx->opcode);
3454     int ra = rA(ctx->opcode);
3455     int nr;
3456 
3457     if (ctx->le_mode) {
3458         gen_align_no_le(ctx);
3459         return;
3460     }
3461     if (nb == 0) {
3462         nb = 32;
3463     }
3464     nr = DIV_ROUND_UP(nb, 4);
3465     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
3466         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
3467         return;
3468     }
3469     gen_set_access_type(ctx, ACCESS_INT);
3470     t0 = tcg_temp_new();
3471     gen_addr_register(ctx, t0);
3472     t1 = tcg_const_i32(nb);
3473     t2 = tcg_const_i32(start);
3474     gen_helper_lsw(cpu_env, t0, t1, t2);
3475     tcg_temp_free(t0);
3476     tcg_temp_free_i32(t1);
3477     tcg_temp_free_i32(t2);
3478 }
3479 
3480 /* lswx */
3481 static void gen_lswx(DisasContext *ctx)
3482 {
3483     TCGv t0;
3484     TCGv_i32 t1, t2, t3;
3485 
3486     if (ctx->le_mode) {
3487         gen_align_no_le(ctx);
3488         return;
3489     }
3490     gen_set_access_type(ctx, ACCESS_INT);
3491     t0 = tcg_temp_new();
3492     gen_addr_reg_index(ctx, t0);
3493     t1 = tcg_const_i32(rD(ctx->opcode));
3494     t2 = tcg_const_i32(rA(ctx->opcode));
3495     t3 = tcg_const_i32(rB(ctx->opcode));
3496     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3497     tcg_temp_free(t0);
3498     tcg_temp_free_i32(t1);
3499     tcg_temp_free_i32(t2);
3500     tcg_temp_free_i32(t3);
3501 }
3502 
3503 /* stswi */
3504 static void gen_stswi(DisasContext *ctx)
3505 {
3506     TCGv t0;
3507     TCGv_i32 t1, t2;
3508     int nb = NB(ctx->opcode);
3509 
3510     if (ctx->le_mode) {
3511         gen_align_no_le(ctx);
3512         return;
3513     }
3514     gen_set_access_type(ctx, ACCESS_INT);
3515     t0 = tcg_temp_new();
3516     gen_addr_register(ctx, t0);
3517     if (nb == 0) {
3518         nb = 32;
3519     }
3520     t1 = tcg_const_i32(nb);
3521     t2 = tcg_const_i32(rS(ctx->opcode));
3522     gen_helper_stsw(cpu_env, t0, t1, t2);
3523     tcg_temp_free(t0);
3524     tcg_temp_free_i32(t1);
3525     tcg_temp_free_i32(t2);
3526 }
3527 
3528 /* stswx */
3529 static void gen_stswx(DisasContext *ctx)
3530 {
3531     TCGv t0;
3532     TCGv_i32 t1, t2;
3533 
3534     if (ctx->le_mode) {
3535         gen_align_no_le(ctx);
3536         return;
3537     }
3538     gen_set_access_type(ctx, ACCESS_INT);
3539     t0 = tcg_temp_new();
3540     gen_addr_reg_index(ctx, t0);
3541     t1 = tcg_temp_new_i32();
3542     tcg_gen_trunc_tl_i32(t1, cpu_xer);
3543     tcg_gen_andi_i32(t1, t1, 0x7F);
3544     t2 = tcg_const_i32(rS(ctx->opcode));
3545     gen_helper_stsw(cpu_env, t0, t1, t2);
3546     tcg_temp_free(t0);
3547     tcg_temp_free_i32(t1);
3548     tcg_temp_free_i32(t2);
3549 }
3550 
3551 /***                        Memory synchronisation                         ***/
3552 /* eieio */
3553 static void gen_eieio(DisasContext *ctx)
3554 {
3555     TCGBar bar = TCG_MO_ALL;
3556 
3557     /*
3558      * eieio has complex semantics. It provides memory ordering between
3559      * operations in the set:
3560      * - loads from CI memory.
3561      * - stores to CI memory.
3562      * - stores to WT memory.
3563      *
3564      * It separately also orders memory for operations in the set:
3565      * - stores to cacheable memory.
3566      *
3567      * It also serializes instructions:
3568      * - dcbt and dcbst.
3569      *
3570      * It separately serializes:
3571      * - tlbie and tlbsync.
3572      *
3573      * And separately serializes:
3574      * - slbieg, slbiag, and slbsync.
3575      *
3576      * The end result is that CI memory ordering requires TCG_MO_ALL
3577      * and it is not possible to special-case more relaxed ordering for
3578      * cacheable accesses. TCG_BAR_SC is required to provide this
3579      * serialization.
3580      */
3581 
3582     /*
3583      * POWER9 has an eieio instruction variant using bit 6 as a hint to
3584      * tell the CPU it is a store-forwarding barrier.
3585      */
3586     if (ctx->opcode & 0x2000000) {
3587         /*
3588          * ISA says that "Reserved fields in instructions are ignored
3589      * by the processor". So ignore bit 6 on non-POWER9 CPUs, but as
3590      * this is not an instruction software should be using,
3591          * complain to the user.
3592          */
3593         if (!(ctx->insns_flags2 & PPC2_ISA300)) {
3594             qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
3595                           TARGET_FMT_lx "\n", ctx->cia);
3596         } else {
3597             bar = TCG_MO_ST_LD;
3598         }
3599     }
3600 
3601     tcg_gen_mb(bar | TCG_BAR_SC);
3602 }
3603 
3604 #if !defined(CONFIG_USER_ONLY)
3605 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
3606 {
3607     TCGv_i32 t;
3608     TCGLabel *l;
3609 
3610     if (!ctx->lazy_tlb_flush) {
3611         return;
3612     }
3613     l = gen_new_label();
3614     t = tcg_temp_new_i32();
3615     tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
3616     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
3617     if (global) {
3618         gen_helper_check_tlb_flush_global(cpu_env);
3619     } else {
3620         gen_helper_check_tlb_flush_local(cpu_env);
3621     }
3622     gen_set_label(l);
3623     tcg_temp_free_i32(t);
3624 }
3625 #else
3626 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
3627 #endif
3628 
3629 /* isync */
3630 static void gen_isync(DisasContext *ctx)
3631 {
3632     /*
3633      * We need to check for a pending TLB flush. This can only happen in
3634      * kernel mode, so check MSR_PR.
3635      */
3636     if (!ctx->pr) {
3637         gen_check_tlb_flush(ctx, false);
3638     }
3639     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
3640     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
3641 }
3642 
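/* Size in bytes of the access described by a MemOp, e.g. 4 for MO_UL. */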
3643 #define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
3644 
3645 static void gen_load_locked(DisasContext *ctx, MemOp memop)
3646 {
3647     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
3648     TCGv t0 = tcg_temp_new();
3649 
3650     gen_set_access_type(ctx, ACCESS_RES);
3651     gen_addr_reg_index(ctx, t0);
3652     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
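    /* Record the reservation address and value for the matching stcx. */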
3653     tcg_gen_mov_tl(cpu_reserve, t0);
3654     tcg_gen_mov_tl(cpu_reserve_val, gpr);
3655     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
3656     tcg_temp_free(t0);
3657 }
3658 
3659 #define LARX(name, memop)                  \
3660 static void gen_##name(DisasContext *ctx)  \
3661 {                                          \
3662     gen_load_locked(ctx, memop);           \
3663 }
3664 
3665 /* lwarx */
3666 LARX(lbarx, DEF_MEMOP(MO_UB))
3667 LARX(lharx, DEF_MEMOP(MO_UW))
3668 LARX(lwarx, DEF_MEMOP(MO_UL))
3669 
3670 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3671                                       TCGv EA, TCGCond cond, int addend)
3672 {
3673     TCGv t = tcg_temp_new();
3674     TCGv t2 = tcg_temp_new();
3675     TCGv u = tcg_temp_new();
3676 
3677     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3678     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3679     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3680     tcg_gen_addi_tl(u, t, addend);
3681 
3682     /* E.g. for fetch and increment bounded... */
3683     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3684     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3685     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3686 
3687     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3688     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3689     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3690 
3691     tcg_temp_free(t);
3692     tcg_temp_free(t2);
3693     tcg_temp_free(u);
3694 }
3695 
3696 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
3697 {
3698     uint32_t gpr_FC = FC(ctx->opcode);
3699     TCGv EA = tcg_temp_new();
3700     int rt = rD(ctx->opcode);
3701     bool need_serial;
3702     TCGv src, dst;
3703 
3704     gen_addr_register(ctx, EA);
3705     dst = cpu_gpr[rt];
3706     src = cpu_gpr[(rt + 1) & 31];
3707 
3708     need_serial = false;
3709     memop |= MO_ALIGN;
3710     switch (gpr_FC) {
3711     case 0: /* Fetch and add */
3712         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3713         break;
3714     case 1: /* Fetch and xor */
3715         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3716         break;
3717     case 2: /* Fetch and or */
3718         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3719         break;
3720     case 3: /* Fetch and 'and' */
3721         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3722         break;
3723     case 4:  /* Fetch and max unsigned */
3724         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3725         break;
3726     case 5:  /* Fetch and max signed */
3727         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3728         break;
3729     case 6:  /* Fetch and min unsigned */
3730         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3731         break;
3732     case 7:  /* Fetch and min signed */
3733         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3734         break;
3735     case 8: /* Swap */
3736         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3737         break;
3738 
3739     case 16: /* Compare and swap not equal */
3740         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3741             need_serial = true;
3742         } else {
3743             TCGv t0 = tcg_temp_new();
3744             TCGv t1 = tcg_temp_new();
3745 
3746             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3747             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3748                 tcg_gen_mov_tl(t1, src);
3749             } else {
3750                 tcg_gen_ext32u_tl(t1, src);
3751             }
3752             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3753                                cpu_gpr[(rt + 2) & 31], t0);
3754             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3755             tcg_gen_mov_tl(dst, t0);
3756 
3757             tcg_temp_free(t0);
3758             tcg_temp_free(t1);
3759         }
3760         break;
3761 
3762     case 24: /* Fetch and increment bounded */
3763         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3764             need_serial = true;
3765         } else {
3766             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3767         }
3768         break;
3769     case 25: /* Fetch and increment equal */
3770         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3771             need_serial = true;
3772         } else {
3773             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3774         }
3775         break;
3776     case 28: /* Fetch and decrement bounded */
3777         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3778             need_serial = true;
3779         } else {
3780             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3781         }
3782         break;
3783 
3784     default:
3785         /* invoke data storage error handler */
3786         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3787     }
3788     tcg_temp_free(EA);
3789 
3790     if (need_serial) {
3791         /* Restart with exclusive lock.  */
3792         gen_helper_exit_atomic(cpu_env);
3793         ctx->base.is_jmp = DISAS_NORETURN;
3794     }
3795 }
3796 
3797 static void gen_lwat(DisasContext *ctx)
3798 {
3799     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3800 }
3801 
3802 #ifdef TARGET_PPC64
3803 static void gen_ldat(DisasContext *ctx)
3804 {
3805     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3806 }
3807 #endif
3808 
3809 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3810 {
3811     uint32_t gpr_FC = FC(ctx->opcode);
3812     TCGv EA = tcg_temp_new();
3813     TCGv src, discard;
3814 
3815     gen_addr_register(ctx, EA);
3816     src = cpu_gpr[rD(ctx->opcode)];
3817     discard = tcg_temp_new();
3818 
3819     memop |= MO_ALIGN;
3820     switch (gpr_FC) {
3821     case 0: /* add and Store */
3822         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3823         break;
3824     case 1: /* xor and Store */
3825         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3826         break;
3827     case 2: /* Or and Store */
3828         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3829         break;
3830     case 3: /* 'and' and Store */
3831         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3832         break;
3833     case 4:  /* Store max unsigned */
3834         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3835         break;
3836     case 5:  /* Store max signed */
3837         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3838         break;
3839     case 6:  /* Store min unsigned */
3840         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3841         break;
3842     case 7:  /* Store min signed */
3843         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3844         break;
3845     case 24: /* Store twin  */
3846         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3847             /* Restart with exclusive lock.  */
3848             gen_helper_exit_atomic(cpu_env);
3849             ctx->base.is_jmp = DISAS_NORETURN;
3850         } else {
3851             TCGv t = tcg_temp_new();
3852             TCGv t2 = tcg_temp_new();
3853             TCGv s = tcg_temp_new();
3854             TCGv s2 = tcg_temp_new();
3855             TCGv ea_plus_s = tcg_temp_new();
3856 
3857             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3858             tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
3859             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3860             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3861             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3862             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3863             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3864 
3865             tcg_temp_free(ea_plus_s);
3866             tcg_temp_free(s2);
3867             tcg_temp_free(s);
3868             tcg_temp_free(t2);
3869             tcg_temp_free(t);
3870         }
3871         break;
3872     default:
3873         /* invoke data storage error handler */
3874         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3875     }
3876     tcg_temp_free(discard);
3877     tcg_temp_free(EA);
3878 }
3879 
3880 static void gen_stwat(DisasContext *ctx)
3881 {
3882     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3883 }
3884 
3885 #ifdef TARGET_PPC64
3886 static void gen_stdat(DisasContext *ctx)
3887 {
3888     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3889 }
3890 #endif
3891 
3892 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3893 {
3894     TCGLabel *l1 = gen_new_label();
3895     TCGLabel *l2 = gen_new_label();
3896     TCGv t0 = tcg_temp_new();
3897     int reg = rS(ctx->opcode);
3898 
3899     gen_set_access_type(ctx, ACCESS_RES);
3900     gen_addr_reg_index(ctx, t0);
3901     tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
3902     tcg_temp_free(t0);
3903 
3904     t0 = tcg_temp_new();
3905     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3906                               cpu_gpr[reg], ctx->mem_idx,
3907                               DEF_MEMOP(memop) | MO_ALIGN);
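    /* CR0.EQ is set iff the store was performed; SO is always copied in. */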
3908     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3909     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3910     tcg_gen_or_tl(t0, t0, cpu_so);
3911     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
3912     tcg_temp_free(t0);
3913     tcg_gen_br(l2);
3914 
3915     gen_set_label(l1);
3916 
3917     /*
3918      * Address mismatch implies failure.  But we still need to provide
3919      * the memory barrier semantics of the instruction.
3920      */
3921     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
3922     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
3923 
3924     gen_set_label(l2);
3925     tcg_gen_movi_tl(cpu_reserve, -1);
3926 }
3927 
3928 #define STCX(name, memop)                  \
3929 static void gen_##name(DisasContext *ctx)  \
3930 {                                          \
3931     gen_conditional_store(ctx, memop);     \
3932 }
3933 
3934 STCX(stbcx_, DEF_MEMOP(MO_UB))
3935 STCX(sthcx_, DEF_MEMOP(MO_UW))
3936 STCX(stwcx_, DEF_MEMOP(MO_UL))
3937 
3938 #if defined(TARGET_PPC64)
3939 /* ldarx */
3940 LARX(ldarx, DEF_MEMOP(MO_UQ))
3941 /* stdcx. */
3942 STCX(stdcx_, DEF_MEMOP(MO_UQ))
3943 
3944 /* lqarx */
3945 static void gen_lqarx(DisasContext *ctx)
3946 {
3947     int rd = rD(ctx->opcode);
3948     TCGv EA, hi, lo;
3949 
3950     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3951                  (rd == rB(ctx->opcode)))) {
3952         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3953         return;
3954     }
3955 
3956     gen_set_access_type(ctx, ACCESS_RES);
3957     EA = tcg_temp_new();
3958     gen_addr_reg_index(ctx, EA);
3959 
3960     /* Note that the low part is always in RD+1, even in LE mode.  */
3961     lo = cpu_gpr[rd + 1];
3962     hi = cpu_gpr[rd];
3963 
3964     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3965         if (HAVE_ATOMIC128) {
3966             TCGv_i32 oi = tcg_temp_new_i32();
3967             if (ctx->le_mode) {
3968                 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
3969                                                     ctx->mem_idx));
3970                 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
3971             } else {
3972                 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
3973                                                     ctx->mem_idx));
3974                 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
3975             }
3976             tcg_temp_free_i32(oi);
3977             tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
3978         } else {
3979             /* Restart with exclusive lock.  */
3980             gen_helper_exit_atomic(cpu_env);
3981             ctx->base.is_jmp = DISAS_NORETURN;
3982             tcg_temp_free(EA);
3983             return;
3984         }
3985     } else if (ctx->le_mode) {
3986         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
3987         tcg_gen_mov_tl(cpu_reserve, EA);
3988         gen_addr_add(ctx, EA, EA, 8);
3989         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
3990     } else {
3991         tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
3992         tcg_gen_mov_tl(cpu_reserve, EA);
3993         gen_addr_add(ctx, EA, EA, 8);
3994         tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
3995     }
3996     tcg_temp_free(EA);
3997 
3998     tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
3999     tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
4000 }
4001 
4002 /* stqcx. */
4003 static void gen_stqcx_(DisasContext *ctx)
4004 {
4005     TCGLabel *lab_fail, *lab_over;
4006     int rs = rS(ctx->opcode);
4007     TCGv EA, t0, t1;
4008     TCGv_i128 cmp, val;
4009 
4010     if (unlikely(rs & 1)) {
4011         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4012         return;
4013     }
4014 
4015     lab_fail = gen_new_label();
4016     lab_over = gen_new_label();
4017 
4018     gen_set_access_type(ctx, ACCESS_RES);
4019     EA = tcg_temp_new();
4020     gen_addr_reg_index(ctx, EA);
4021 
4022     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
4023     tcg_temp_free(EA);
4024 
4025     cmp = tcg_temp_new_i128();
4026     val = tcg_temp_new_i128();
4027 
4028     tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
4029 
4030     /* Note that the low part is always in RS+1, even in LE mode.  */
4031     tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
4032 
4033     tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
4034                                 DEF_MEMOP(MO_128 | MO_ALIGN));
4035     tcg_temp_free_i128(cmp);
4036 
4037     t0 = tcg_temp_new();
4038     t1 = tcg_temp_new();
4039     tcg_gen_extr_i128_i64(t1, t0, val);
4040     tcg_temp_free_i128(val);
4041 
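    /* t0 | t1 becomes zero iff the 128-bit old value matched the reservation. */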
4042     tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
4043     tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
4044     tcg_gen_or_tl(t0, t0, t1);
4045     tcg_temp_free(t1);
4046 
4047     tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
4048     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
4049     tcg_gen_or_tl(t0, t0, cpu_so);
4050     tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
4051     tcg_temp_free(t0);
4052 
4053     tcg_gen_br(lab_over);
4054     gen_set_label(lab_fail);
4055 
4056     /*
4057      * Address mismatch implies failure.  But we still need to provide
4058      * the memory barrier semantics of the instruction.
4059      */
4060     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
4061     tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
4062 
4063     gen_set_label(lab_over);
4064     tcg_gen_movi_tl(cpu_reserve, -1);
4065 }
4066 #endif /* defined(TARGET_PPC64) */
4067 
4068 /* sync */
4069 static void gen_sync(DisasContext *ctx)
4070 {
4071     TCGBar bar = TCG_MO_ALL;
4072     uint32_t l = (ctx->opcode >> 21) & 3;
4073 
4074     if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
4075         bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
4076     }
4077 
4078     /*
4079      * We may need to check for a pending TLB flush.
4080      *
4081      * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
4082      *
4083      * Additionally, this can only happen in kernel mode, so check
4084      * MSR_PR as well.
4085      */
4086     if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
4087         gen_check_tlb_flush(ctx, true);
4088     }
4089 
4090     tcg_gen_mb(bar | TCG_BAR_SC);
4091 }
4092 
4093 /* wait */
4094 static void gen_wait(DisasContext *ctx)
4095 {
4096     uint32_t wc;
4097 
4098     if (ctx->insns_flags & PPC_WAIT) {
4099         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
4100 
4101         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
4102             /* v2.06 introduced the WC field. WC > 0 may be treated as a no-op. */
4103             wc = WC(ctx->opcode);
4104         } else {
4105             wc = 0;
4106         }
4107 
4108     } else if (ctx->insns_flags2 & PPC2_ISA300) {
4109         /* v3.0 defines a new 'wait' encoding. */
4110         wc = WC(ctx->opcode);
4111         if (ctx->insns_flags2 & PPC2_ISA310) {
4112             uint32_t pl = PL(ctx->opcode);
4113 
4114             /* WC 1,2 may be treated as no-ops. WC 3 is reserved. */
4115             if (wc == 3) {
4116                 gen_invalid(ctx);
4117                 return;
4118             }
4119 
4120             /* PL 1-3 are reserved. If WC=2 then the insn is treated as a no-op. */
4121             if (pl > 0 && wc != 2) {
4122                 gen_invalid(ctx);
4123                 return;
4124             }
4125 
4126         } else { /* ISA300 */
4127             /* WC 1-3 are reserved */
4128             if (wc > 0) {
4129                 gen_invalid(ctx);
4130                 return;
4131             }
4132         }
4133 
4134     } else {
4135         warn_report("wait instruction decoded with wrong ISA flags.");
4136         gen_invalid(ctx);
4137         return;
4138     }
4139 
4140     /*
4141      * wait without WC field or with WC=0 waits for an exception / interrupt
4142      * to occur.
4143      */
4144     if (wc == 0) {
4145         TCGv_i32 t0 = tcg_const_i32(1);
4146         tcg_gen_st_i32(t0, cpu_env,
4147                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
4148         tcg_temp_free_i32(t0);
4149         /* Stop translation, as the CPU is supposed to sleep from now on */
4150         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4151     }
4152 
4153     /*
4154      * Other wait types must not just wait until an exception occurs because
4155      * ignoring their other wake-up conditions could cause a hang.
4156      *
4157      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
4158      * no-ops.
4159      *
4160      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
4161      *
4162      * wc=2 waits for an implementation-specific condition, which could be
4163      * always true, so it can be implemented as a no-op.
4164      *
4165      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
4166      *
4167      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
4168      * Reservation-loss may have implementation-specific conditions, so it
4169      * can be implemented as a no-op.
4170      *
4171      * wc=2 waits for an exception or an amount of time to pass. This
4172      * amount is implementation-specific so it can be implemented as a
4173      * no-op.
4174      *
4175      * ISA v3.1 allows for execution to resume "in the rare case of
4176      * an implementation-dependent event", so in any case software must
4177      * not depend on the architected resumption condition becoming true;
4178      * no-op implementations should therefore be architecturally correct
4179      * (if suboptimal).
4180      */
4181 }
4182 
4183 #if defined(TARGET_PPC64)
4184 static void gen_doze(DisasContext *ctx)
4185 {
4186 #if defined(CONFIG_USER_ONLY)
4187     GEN_PRIV(ctx);
4188 #else
4189     TCGv_i32 t;
4190 
4191     CHK_HV(ctx);
4192     t = tcg_const_i32(PPC_PM_DOZE);
4193     gen_helper_pminsn(cpu_env, t);
4194     tcg_temp_free_i32(t);
4195     /* Stop translation, as the CPU is supposed to sleep from now on */
4196     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4197 #endif /* defined(CONFIG_USER_ONLY) */
4198 }
4199 
4200 static void gen_nap(DisasContext *ctx)
4201 {
4202 #if defined(CONFIG_USER_ONLY)
4203     GEN_PRIV(ctx);
4204 #else
4205     TCGv_i32 t;
4206 
4207     CHK_HV(ctx);
4208     t = tcg_const_i32(PPC_PM_NAP);
4209     gen_helper_pminsn(cpu_env, t);
4210     tcg_temp_free_i32(t);
4211     /* Stop translation, as the CPU is supposed to sleep from now on */
4212     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4213 #endif /* defined(CONFIG_USER_ONLY) */
4214 }
4215 
4216 static void gen_stop(DisasContext *ctx)
4217 {
4218 #if defined(CONFIG_USER_ONLY)
4219     GEN_PRIV(ctx);
4220 #else
4221     TCGv_i32 t;
4222 
4223     CHK_HV(ctx);
4224     t = tcg_const_i32(PPC_PM_STOP);
4225     gen_helper_pminsn(cpu_env, t);
4226     tcg_temp_free_i32(t);
4227     /* Stop translation, as the CPU is supposed to sleep from now on */
4228     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4229 #endif /* defined(CONFIG_USER_ONLY) */
4230 }
4231 
4232 static void gen_sleep(DisasContext *ctx)
4233 {
4234 #if defined(CONFIG_USER_ONLY)
4235     GEN_PRIV(ctx);
4236 #else
4237     TCGv_i32 t;
4238 
4239     CHK_HV(ctx);
4240     t = tcg_const_i32(PPC_PM_SLEEP);
4241     gen_helper_pminsn(cpu_env, t);
4242     tcg_temp_free_i32(t);
4243     /* Stop translation, as the CPU is supposed to sleep from now on */
4244     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4245 #endif /* defined(CONFIG_USER_ONLY) */
4246 }
4247 
4248 static void gen_rvwinkle(DisasContext *ctx)
4249 {
4250 #if defined(CONFIG_USER_ONLY)
4251     GEN_PRIV(ctx);
4252 #else
4253     TCGv_i32 t;
4254 
4255     CHK_HV(ctx);
4256     t = tcg_const_i32(PPC_PM_RVWINKLE);
4257     gen_helper_pminsn(cpu_env, t);
4258     tcg_temp_free_i32(t);
4259     /* Stop translation, as the CPU is supposed to sleep from now on */
4260     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4261 #endif /* defined(CONFIG_USER_ONLY) */
4262 }
4263 #endif /* #if defined(TARGET_PPC64) */
4264 
4265 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
4266 {
4267 #if defined(TARGET_PPC64)
4268     if (ctx->has_cfar) {
4269         tcg_gen_movi_tl(cpu_cfar, nip);
4270     }
4271 #endif
4272 }
4273 
4274 #if defined(TARGET_PPC64)
4275 static void pmu_count_insns(DisasContext *ctx)
4276 {
4277     /*
4278      * Do not bother calling the helper if the PMU isn't counting
4279      * instructions.
4280      */
4281     if (!ctx->pmu_insn_cnt) {
4282         return;
4283     }
4284 
4285  #if !defined(CONFIG_USER_ONLY)
4286     TCGLabel *l;
4287     TCGv t0;
4288 
4289     /*
4290      * The PMU insns_inc() helper stops the internal PMU timer if a
4291      * counter overflow happens. In that case, if the guest is
4292      * running with icount and we do not handle it beforehand,
4293      * the helper can trigger a 'bad icount read'.
4294      */
4295     gen_icount_io_start(ctx);
4296 
4297     /* Avoid helper calls when only PMC5-6 are enabled. */
4298     if (!ctx->pmc_other) {
4299         l = gen_new_label();
4300         t0 = tcg_temp_new();
4301 
4302         gen_load_spr(t0, SPR_POWER_PMC5);
4303         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4304         gen_store_spr(SPR_POWER_PMC5, t0);
4305         /* Check for overflow, if it's enabled */
4306         if (ctx->mmcr0_pmcjce) {
4307             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
4308             gen_helper_handle_pmc5_overflow(cpu_env);
4309         }
4310 
4311         gen_set_label(l);
4312         tcg_temp_free(t0);
4313     } else {
4314         gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
4315     }
4316   #else
4317     /*
4318      * User mode can read (but not write) PMC5 and start/stop
4319      * the PMU via MMCR0_FC. In this case just increment
4320      * PMC5 with base.num_insns.
4321      */
4322     TCGv t0 = tcg_temp_new();
4323 
4324     gen_load_spr(t0, SPR_POWER_PMC5);
4325     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
4326     gen_store_spr(SPR_POWER_PMC5, t0);
4327 
4328     tcg_temp_free(t0);
4329   #endif /* #if !defined(CONFIG_USER_ONLY) */
4330 }
4331 #else
4332 static void pmu_count_insns(DisasContext *ctx)
4333 {
4334     return;
4335 }
4336 #endif /* #if defined(TARGET_PPC64) */
4337 
4338 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
4339 {
4340     return translator_use_goto_tb(&ctx->base, dest);
4341 }
4342 
4343 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4344 {
4345     if (unlikely(ctx->singlestep_enabled)) {
4346         gen_debug_exception(ctx);
4347     } else {
4348         /*
4349          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4350          * CF_NO_GOTO_PTR is set. Count insns now.
4351          */
4352         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4353             pmu_count_insns(ctx);
4354         }
4355 
4356         tcg_gen_lookup_and_goto_ptr();
4357     }
4358 }
4359 
4360 /***                                Branch                                 ***/
4361 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
4362 {
4363     if (NARROW_MODE(ctx)) {
4364         dest = (uint32_t) dest;
4365     }
4366     if (use_goto_tb(ctx, dest)) {
4367         pmu_count_insns(ctx);
4368         tcg_gen_goto_tb(n);
4369         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4370         tcg_gen_exit_tb(ctx->base.tb, n);
4371     } else {
4372         tcg_gen_movi_tl(cpu_nip, dest & ~3);
4373         gen_lookup_and_goto_ptr(ctx);
4374     }
4375 }
4376 
4377 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
4378 {
4379     if (NARROW_MODE(ctx)) {
4380         nip = (uint32_t)nip;
4381     }
4382     tcg_gen_movi_tl(cpu_lr, nip);
4383 }
4384 
4385 /* b ba bl bla */
4386 static void gen_b(DisasContext *ctx)
4387 {
4388     target_ulong li, target;
4389 
4390     /* sign extend LI */
4391     li = LI(ctx->opcode);
4392     li = (li ^ 0x02000000) - 0x02000000;
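    /*
     * LI() yields the 26-bit branch displacement (LI field << 2)
     * zero-extended; xoring with the sign bit 0x02000000 and subtracting
     * it back sign-extends the value to the full register width.
     */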
4393     if (likely(AA(ctx->opcode) == 0)) {
4394         target = ctx->cia + li;
4395     } else {
4396         target = li;
4397     }
4398     if (LK(ctx->opcode)) {
4399         gen_setlr(ctx, ctx->base.pc_next);
4400     }
4401     gen_update_cfar(ctx, ctx->cia);
4402     gen_goto_tb(ctx, 0, target);
4403     ctx->base.is_jmp = DISAS_NORETURN;
4404 }
4405 
4406 #define BCOND_IM  0
4407 #define BCOND_LR  1
4408 #define BCOND_CTR 2
4409 #define BCOND_TAR 3
4410 
4411 static void gen_bcond(DisasContext *ctx, int type)
4412 {
4413     uint32_t bo = BO(ctx->opcode);
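    /*
     * BO bits tested below: 0x10 = do not test CR, 0x08 = CR bit value to
     * branch on, 0x04 = do not decrement CTR, 0x02 = branch on CTR == 0
     * (rather than CTR != 0).
     */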
4414     TCGLabel *l1;
4415     TCGv target;
4416 
4417     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
4418         target = tcg_temp_local_new();
4419         if (type == BCOND_CTR) {
4420             tcg_gen_mov_tl(target, cpu_ctr);
4421         } else if (type == BCOND_TAR) {
4422             gen_load_spr(target, SPR_TAR);
4423         } else {
4424             tcg_gen_mov_tl(target, cpu_lr);
4425         }
4426     } else {
4427         target = NULL;
4428     }
4429     if (LK(ctx->opcode)) {
4430         gen_setlr(ctx, ctx->base.pc_next);
4431     }
4432     l1 = gen_new_label();
4433     if ((bo & 0x4) == 0) {
4434         /* Decrement and test CTR */
4435         TCGv temp = tcg_temp_new();
4436 
4437         if (type == BCOND_CTR) {
4438             /*
4439              * All ISAs up to v3 describe this form of bcctr as invalid but
4440              * some processors, i.e. 64-bit server processors compliant with
4441              * arch 2.x, do implement a "test and decrement" logic instead,
4442              * as described in their respective UMs. This logic requires CTR
4443              * to act as both the branch target and a counter, which makes it
4444              * basically useless and thus never used in real code.
4445              *
4446              * This form was hence chosen to trigger the extra micro-architectural
4447              * side effect on real HW that is needed for the Spectre v2 workaround.
4448              * It is up to guests that implement such a workaround, i.e. Linux, to
4449              * use this form in a way that just triggers the side effect without
4450              * doing anything else harmful.
4451              */
4452             if (unlikely(!is_book3s_arch2x(ctx))) {
4453                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4454                 tcg_temp_free(temp);
4455                 tcg_temp_free(target);
4456                 return;
4457             }
4458 
4459             if (NARROW_MODE(ctx)) {
4460                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4461             } else {
4462                 tcg_gen_mov_tl(temp, cpu_ctr);
4463             }
4464             if (bo & 0x2) {
4465                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4466             } else {
4467                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4468             }
4469             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4470         } else {
4471             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
4472             if (NARROW_MODE(ctx)) {
4473                 tcg_gen_ext32u_tl(temp, cpu_ctr);
4474             } else {
4475                 tcg_gen_mov_tl(temp, cpu_ctr);
4476             }
4477             if (bo & 0x2) {
4478                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
4479             } else {
4480                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
4481             }
4482         }
4483         tcg_temp_free(temp);
4484     }
4485     if ((bo & 0x10) == 0) {
4486         /* Test CR */
4487         uint32_t bi = BI(ctx->opcode);
4488         uint32_t mask = 0x08 >> (bi & 0x03);
4489         TCGv_i32 temp = tcg_temp_new_i32();
4490 
4491         if (bo & 0x8) {
4492             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4493             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
4494         } else {
4495             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
4496             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
4497         }
4498         tcg_temp_free_i32(temp);
4499     }
4500     gen_update_cfar(ctx, ctx->cia);
4501     if (type == BCOND_IM) {
4502         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
4503         if (likely(AA(ctx->opcode) == 0)) {
4504             gen_goto_tb(ctx, 0, ctx->cia + li);
4505         } else {
4506             gen_goto_tb(ctx, 0, li);
4507         }
4508     } else {
4509         if (NARROW_MODE(ctx)) {
4510             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
4511         } else {
4512             tcg_gen_andi_tl(cpu_nip, target, ~3);
4513         }
4514         gen_lookup_and_goto_ptr(ctx);
4515         tcg_temp_free(target);
4516     }
4517     if ((bo & 0x14) != 0x14) {
4518         /* fallthrough case */
4519         gen_set_label(l1);
4520         gen_goto_tb(ctx, 1, ctx->base.pc_next);
4521     }
4522     ctx->base.is_jmp = DISAS_NORETURN;
4523 }
4524 
4525 static void gen_bc(DisasContext *ctx)
4526 {
4527     gen_bcond(ctx, BCOND_IM);
4528 }
4529 
4530 static void gen_bcctr(DisasContext *ctx)
4531 {
4532     gen_bcond(ctx, BCOND_CTR);
4533 }
4534 
4535 static void gen_bclr(DisasContext *ctx)
4536 {
4537     gen_bcond(ctx, BCOND_LR);
4538 }
4539 
4540 static void gen_bctar(DisasContext *ctx)
4541 {
4542     gen_bcond(ctx, BCOND_TAR);
4543 }
4544 
4545 /***                      Condition register logical                       ***/
4546 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
4547 static void glue(gen_, name)(DisasContext *ctx)                               \
4548 {                                                                             \
4549     uint8_t bitmask;                                                          \
4550     int sh;                                                                   \
4551     TCGv_i32 t0, t1;                                                          \
4552     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
4553     t0 = tcg_temp_new_i32();                                                  \
4554     if (sh > 0)                                                               \
4555         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
4556     else if (sh < 0)                                                          \
4557         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
4558     else                                                                      \
4559         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
4560     t1 = tcg_temp_new_i32();                                                  \
4561     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
4562     if (sh > 0)                                                               \
4563         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
4564     else if (sh < 0)                                                          \
4565         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
4566     else                                                                      \
4567         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
4568     tcg_op(t0, t0, t1);                                                       \
4569     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
4570     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
4571     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
4572     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
4573     tcg_temp_free_i32(t0);                                                    \
4574     tcg_temp_free_i32(t1);                                                    \
4575 }
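/*
 * GEN_CRLOGIC aligns the crbA and crbB source bits with the crbD position
 * (each cpu_crf[] element holds one 4-bit CR field, bit mask 0x08 >> pos),
 * applies tcg_op, and merges the single result bit back into the
 * destination CR field.
 */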
4576 
4577 /* crand */
4578 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
4579 /* crandc */
4580 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
4581 /* creqv */
4582 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
4583 /* crnand */
4584 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
4585 /* crnor */
4586 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
4587 /* cror */
4588 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
4589 /* crorc */
4590 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
4591 /* crxor */
4592 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4593 
4594 /* mcrf */
4595 static void gen_mcrf(DisasContext *ctx)
4596 {
4597     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
4598 }
4599 
4600 /***                           System linkage                              ***/
4601 
4602 /* rfi (supervisor only) */
4603 static void gen_rfi(DisasContext *ctx)
4604 {
4605 #if defined(CONFIG_USER_ONLY)
4606     GEN_PRIV(ctx);
4607 #else
4608     /*
4609      * This instruction doesn't exist anymore on 64-bit server
4610      * processors compliant with arch 2.x
4611      */
4612     if (is_book3s_arch2x(ctx)) {
4613         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4614         return;
4615     }
4616     /* Restore CPU state */
4617     CHK_SV(ctx);
4618     gen_icount_io_start(ctx);
4619     gen_update_cfar(ctx, ctx->cia);
4620     gen_helper_rfi(cpu_env);
4621     ctx->base.is_jmp = DISAS_EXIT;
4622 #endif
4623 }
4624 
4625 #if defined(TARGET_PPC64)
4626 static void gen_rfid(DisasContext *ctx)
4627 {
4628 #if defined(CONFIG_USER_ONLY)
4629     GEN_PRIV(ctx);
4630 #else
4631     /* Restore CPU state */
4632     CHK_SV(ctx);
4633     gen_icount_io_start(ctx);
4634     gen_update_cfar(ctx, ctx->cia);
4635     gen_helper_rfid(cpu_env);
4636     ctx->base.is_jmp = DISAS_EXIT;
4637 #endif
4638 }
4639 
4640 #if !defined(CONFIG_USER_ONLY)
4641 static void gen_rfscv(DisasContext *ctx)
4642 {
4643 #if defined(CONFIG_USER_ONLY)
4644     GEN_PRIV(ctx);
4645 #else
4646     /* Restore CPU state */
4647     CHK_SV(ctx);
4648     gen_icount_io_start(ctx);
4649     gen_update_cfar(ctx, ctx->cia);
4650     gen_helper_rfscv(cpu_env);
4651     ctx->base.is_jmp = DISAS_EXIT;
4652 #endif
4653 }
4654 #endif
4655 
4656 static void gen_hrfid(DisasContext *ctx)
4657 {
4658 #if defined(CONFIG_USER_ONLY)
4659     GEN_PRIV(ctx);
4660 #else
4661     /* Restore CPU state */
4662     CHK_HV(ctx);
4663     gen_helper_hrfid(cpu_env);
4664     ctx->base.is_jmp = DISAS_EXIT;
4665 #endif
4666 }
4667 #endif
4668 
4669 /* sc */
4670 #if defined(CONFIG_USER_ONLY)
4671 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4672 #else
4673 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4674 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4675 #endif
4676 static void gen_sc(DisasContext *ctx)
4677 {
4678     uint32_t lev;
4679 
4680     lev = (ctx->opcode >> 5) & 0x7F;
4681     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4682 }
4683 
4684 #if defined(TARGET_PPC64)
4685 #if !defined(CONFIG_USER_ONLY)
4686 static void gen_scv(DisasContext *ctx)
4687 {
4688     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4689 
4690     /* Set the PC back to the faulting instruction. */
4691     gen_update_nip(ctx, ctx->cia);
4692     gen_helper_scv(cpu_env, tcg_constant_i32(lev));
4693 
4694     ctx->base.is_jmp = DISAS_NORETURN;
4695 }
4696 #endif
4697 #endif
4698 
4699 /***                                Trap                                   ***/
4700 
4701 /* Check for unconditional traps (always or never) */
4702 static bool check_unconditional_trap(DisasContext *ctx)
4703 {
4704     /* Trap never */
4705     if (TO(ctx->opcode) == 0) {
4706         return true;
4707     }
4708     /* Trap always */
4709     if (TO(ctx->opcode) == 31) {
4710         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4711         return true;
4712     }
4713     return false;
4714 }
4715 
4716 /* tw */
4717 static void gen_tw(DisasContext *ctx)
4718 {
4719     TCGv_i32 t0;
4720 
4721     if (check_unconditional_trap(ctx)) {
4722         return;
4723     }
4724     t0 = tcg_const_i32(TO(ctx->opcode));
4725     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4726                   t0);
4727     tcg_temp_free_i32(t0);
4728 }
4729 
4730 /* twi */
4731 static void gen_twi(DisasContext *ctx)
4732 {
4733     TCGv t0;
4734     TCGv_i32 t1;
4735 
4736     if (check_unconditional_trap(ctx)) {
4737         return;
4738     }
4739     t0 = tcg_const_tl(SIMM(ctx->opcode));
4740     t1 = tcg_const_i32(TO(ctx->opcode));
4741     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4742     tcg_temp_free(t0);
4743     tcg_temp_free_i32(t1);
4744 }
4745 
4746 #if defined(TARGET_PPC64)
4747 /* td */
4748 static void gen_td(DisasContext *ctx)
4749 {
4750     TCGv_i32 t0;
4751 
4752     if (check_unconditional_trap(ctx)) {
4753         return;
4754     }
4755     t0 = tcg_const_i32(TO(ctx->opcode));
4756     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4757                   t0);
4758     tcg_temp_free_i32(t0);
4759 }
4760 
4761 /* tdi */
4762 static void gen_tdi(DisasContext *ctx)
4763 {
4764     TCGv t0;
4765     TCGv_i32 t1;
4766 
4767     if (check_unconditional_trap(ctx)) {
4768         return;
4769     }
4770     t0 = tcg_const_tl(SIMM(ctx->opcode));
4771     t1 = tcg_const_i32(TO(ctx->opcode));
4772     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4773     tcg_temp_free(t0);
4774     tcg_temp_free_i32(t1);
4775 }
4776 #endif
4777 
4778 /***                          Processor control                            ***/
4779 
4780 /* mcrxr */
4781 static void gen_mcrxr(DisasContext *ctx)
4782 {
4783     TCGv_i32 t0 = tcg_temp_new_i32();
4784     TCGv_i32 t1 = tcg_temp_new_i32();
4785     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4786 
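         /*
          * CR[crfD] = XER[SO] || XER[OV] || XER[CA] || 0b0,
          * then SO, OV and CA are cleared in XER.
          */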
4787     tcg_gen_trunc_tl_i32(t0, cpu_so);
4788     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4789     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4790     tcg_gen_shli_i32(t0, t0, 3);
4791     tcg_gen_shli_i32(t1, t1, 2);
4792     tcg_gen_shli_i32(dst, dst, 1);
4793     tcg_gen_or_i32(dst, dst, t0);
4794     tcg_gen_or_i32(dst, dst, t1);
4795     tcg_temp_free_i32(t0);
4796     tcg_temp_free_i32(t1);
4797 
4798     tcg_gen_movi_tl(cpu_so, 0);
4799     tcg_gen_movi_tl(cpu_ov, 0);
4800     tcg_gen_movi_tl(cpu_ca, 0);
4801 }
4802 
4803 #ifdef TARGET_PPC64
4804 /* mcrxrx */
4805 static void gen_mcrxrx(DisasContext *ctx)
4806 {
4807     TCGv t0 = tcg_temp_new();
4808     TCGv t1 = tcg_temp_new();
4809     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4810 
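         /* CR[crfD] = OV || OV32 || CA || CA32 */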
4811     /* copy OV and OV32 */
4812     tcg_gen_shli_tl(t0, cpu_ov, 1);
4813     tcg_gen_or_tl(t0, t0, cpu_ov32);
4814     tcg_gen_shli_tl(t0, t0, 2);
4815     /* copy CA and CA32 */
4816     tcg_gen_shli_tl(t1, cpu_ca, 1);
4817     tcg_gen_or_tl(t1, t1, cpu_ca32);
4818     tcg_gen_or_tl(t0, t0, t1);
4819     tcg_gen_trunc_tl_i32(dst, t0);
4820     tcg_temp_free(t0);
4821     tcg_temp_free(t1);
4822 }
4823 #endif
4824 
4825 /* mfcr mfocrf */
4826 static void gen_mfcr(DisasContext *ctx)
4827 {
4828     uint32_t crm, crn;
4829 
4830     if (likely(ctx->opcode & 0x00100000)) {
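             /* mfocrf: a single CRM bit selects the CR field copied into rD */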
4831         crm = CRM(ctx->opcode);
4832         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4833             crn = ctz32(crm);
4834             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4835             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4836                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4837         }
4838     } else {
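             /* mfcr: concatenate all CR fields, crf[0] in the top nibble */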
4839         TCGv_i32 t0 = tcg_temp_new_i32();
4840         tcg_gen_mov_i32(t0, cpu_crf[0]);
4841         tcg_gen_shli_i32(t0, t0, 4);
4842         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4843         tcg_gen_shli_i32(t0, t0, 4);
4844         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4845         tcg_gen_shli_i32(t0, t0, 4);
4846         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4847         tcg_gen_shli_i32(t0, t0, 4);
4848         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4849         tcg_gen_shli_i32(t0, t0, 4);
4850         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4851         tcg_gen_shli_i32(t0, t0, 4);
4852         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4853         tcg_gen_shli_i32(t0, t0, 4);
4854         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4855         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4856         tcg_temp_free_i32(t0);
4857     }
4858 }
4859 
4860 /* mfmsr */
4861 static void gen_mfmsr(DisasContext *ctx)
4862 {
4863     CHK_SV(ctx);
4864     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4865 }
4866 
4867 /* mfspr */
4868 static inline void gen_op_mfspr(DisasContext *ctx)
4869 {
4870     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4871     uint32_t sprn = SPR(ctx->opcode);
4872 
4873 #if defined(CONFIG_USER_ONLY)
4874     read_cb = ctx->spr_cb[sprn].uea_read;
4875 #else
4876     if (ctx->pr) {
4877         read_cb = ctx->spr_cb[sprn].uea_read;
4878     } else if (ctx->hv) {
4879         read_cb = ctx->spr_cb[sprn].hea_read;
4880     } else {
4881         read_cb = ctx->spr_cb[sprn].oea_read;
4882     }
4883 #endif
4884     if (likely(read_cb != NULL)) {
4885         if (likely(read_cb != SPR_NOACCESS)) {
4886             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4887         } else {
4888             /* Privilege exception */
4889             /*
4890              * This is a hack to avoid warnings when running Linux:
4891              * this OS breaks the PowerPC virtualisation model,
4892              * allowing userland applications to read the PVR
4893              */
4894             if (sprn != SPR_PVR) {
4895                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4896                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4897                               ctx->cia);
4898             }
4899             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4900         }
4901     } else {
4902         /* ISA 2.07 defines these as no-ops */
4903         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4904             (sprn >= 808 && sprn <= 811)) {
4905             /* This is a nop */
4906             return;
4907         }
4908         /* Not defined */
4909         qemu_log_mask(LOG_GUEST_ERROR,
4910                       "Trying to read invalid spr %d (0x%03x) at "
4911                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4912 
4913         /*
4914          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
4915          * generate a priv, an hv emu or a no-op
4916          */
4917         if (sprn & 0x10) {
4918             if (ctx->pr) {
4919                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4920             }
4921         } else {
4922             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4923                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4924             }
4925         }
4926     }
4927 }
4928 
4929 static void gen_mfspr(DisasContext *ctx)
4930 {
4931     gen_op_mfspr(ctx);
4932 }
4933 
4934 /* mftb */
4935 static void gen_mftb(DisasContext *ctx)
4936 {
4937     gen_op_mfspr(ctx);
4938 }
4939 
4940 /* mtcrf mtocrf */
4941 static void gen_mtcrf(DisasContext *ctx)
4942 {
4943     uint32_t crm, crn;
4944 
4945     crm = CRM(ctx->opcode);
4946     if (likely((ctx->opcode & 0x00100000))) {
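             /* mtocrf: a single CRM bit selects the CR field updated from rS */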
4947         if (crm && ((crm & (crm - 1)) == 0)) {
4948             TCGv_i32 temp = tcg_temp_new_i32();
4949             crn = ctz32(crm);
4950             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4951             tcg_gen_shri_i32(temp, temp, crn * 4);
4952             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4953             tcg_temp_free_i32(temp);
4954         }
4955     } else {
4956         TCGv_i32 temp = tcg_temp_new_i32();
4957         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4958         for (crn = 0 ; crn < 8 ; crn++) {
4959             if (crm & (1 << crn)) {
4960                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4961                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4962             }
4963         }
4964         tcg_temp_free_i32(temp);
4965     }
4966 }
4967 
4968 /* mtmsr */
4969 #if defined(TARGET_PPC64)
4970 static void gen_mtmsrd(DisasContext *ctx)
4971 {
4972     if (unlikely(!is_book3s_arch2x(ctx))) {
4973         gen_invalid(ctx);
4974         return;
4975     }
4976 
4977     CHK_SV(ctx);
4978 
4979 #if !defined(CONFIG_USER_ONLY)
4980     TCGv t0, t1;
4981     target_ulong mask;
4982 
4983     t0 = tcg_temp_new();
4984     t1 = tcg_temp_new();
4985 
4986     gen_icount_io_start(ctx);
4987 
4988     if (ctx->opcode & 0x00010000) {
4989         /* L=1 form only updates EE and RI */
4990         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4991     } else {
4992         /* mtmsrd does not alter HV, S, ME, or LE */
4993         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4994                  (1ULL << MSR_HV));
4995         /*
4996          * XXX: we need to update nip before the store: if we enter
4997          *      power saving mode, we will exit the loop directly from
4998          *      ppc_store_msr
4999          */
5000         gen_update_nip(ctx, ctx->base.pc_next);
5001     }
5002 
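         /* MSR = (rS & mask) | (MSR & ~mask) */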
5003     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
5004     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
5005     tcg_gen_or_tl(t0, t0, t1);
5006 
5007     gen_helper_store_msr(cpu_env, t0);
5008 
5009     /* Must stop the translation as machine state (may have) changed */
5010     /* Must stop the translation as the machine state may have changed */
5011 
5012     tcg_temp_free(t0);
5013     tcg_temp_free(t1);
5014 #endif /* !defined(CONFIG_USER_ONLY) */
5015 }
5016 #endif /* defined(TARGET_PPC64) */
5017 
5018 static void gen_mtmsr(DisasContext *ctx)
5019 {
5020     CHK_SV(ctx);
5021 
5022 #if !defined(CONFIG_USER_ONLY)
5023     TCGv t0, t1;
5024     target_ulong mask = 0xFFFFFFFF;
5025 
5026     t0 = tcg_temp_new();
5027     t1 = tcg_temp_new();
5028 
5029     gen_icount_io_start(ctx);
5030     if (ctx->opcode & 0x00010000) {
5031         /* L=1 form only updates EE and RI */
5032         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
5033     } else {
5034         /* mtmsr does not alter S, ME, or LE */
5035         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
5036 
5037         /*
5038          * XXX: we need to update nip before the store: if we enter
5039          *      power saving mode, we will exit the loop directly from
5040          *      ppc_store_msr
5041          */
5042         gen_update_nip(ctx, ctx->base.pc_next);
5043     }
5044 
5045     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
5046     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
5047     tcg_gen_or_tl(t0, t0, t1);
5048 
5049     gen_helper_store_msr(cpu_env, t0);
5050 
5051     /* Must stop the translation as machine state (may have) changed */
5052     /* Must stop the translation as the machine state may have changed */
5053 
5054     tcg_temp_free(t0);
5055     tcg_temp_free(t1);
5056 #endif
5057 }
5058 
5059 /* mtspr */
5060 static void gen_mtspr(DisasContext *ctx)
5061 {
5062     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
5063     uint32_t sprn = SPR(ctx->opcode);
5064 
5065 #if defined(CONFIG_USER_ONLY)
5066     write_cb = ctx->spr_cb[sprn].uea_write;
5067 #else
5068     if (ctx->pr) {
5069         write_cb = ctx->spr_cb[sprn].uea_write;
5070     } else if (ctx->hv) {
5071         write_cb = ctx->spr_cb[sprn].hea_write;
5072     } else {
5073         write_cb = ctx->spr_cb[sprn].oea_write;
5074     }
5075 #endif
5076     if (likely(write_cb != NULL)) {
5077         if (likely(write_cb != SPR_NOACCESS)) {
5078             (*write_cb)(ctx, sprn, rS(ctx->opcode));
5079         } else {
5080             /* Privilege exception */
5081             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
5082                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
5083                           ctx->cia);
5084             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5085         }
5086     } else {
5087         /* ISA 2.07 defines these as no-ops */
5088         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
5089             (sprn >= 808 && sprn <= 811)) {
5090             /* This is a nop */
5091             return;
5092         }
5093 
5094         /* Not defined */
5095         qemu_log_mask(LOG_GUEST_ERROR,
5096                       "Trying to write invalid spr %d (0x%03x) at "
5097                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
5098 
5100         /*
5101          * The behaviour depends on MSR:PR and SPR# bit 0x10; it can
5102          * generate a priv, an hv emu or a no-op
5103          */
5104         if (sprn & 0x10) {
5105             if (ctx->pr) {
5106                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5107             }
5108         } else {
5109             if (ctx->pr || sprn == 0) {
5110                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
5111             }
5112         }
5113     }
5114 }
5115 
5116 #if defined(TARGET_PPC64)
5117 /* setb */
5118 static void gen_setb(DisasContext *ctx)
5119 {
5120     TCGv_i32 t0 = tcg_temp_new_i32();
5121     TCGv_i32 t8 = tcg_constant_i32(8);
5122     TCGv_i32 tm1 = tcg_constant_i32(-1);
5123     int crf = crfS(ctx->opcode);
5124 
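         /*
          * rD = -1 if the LT bit of CR[crf] is set,
          *       1 if LT is clear and GT is set, 0 otherwise.
          */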
5125     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
5126     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
5127     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
5128 
5129     tcg_temp_free_i32(t0);
5130 }
5131 #endif
5132 
5133 /***                         Cache management                              ***/
5134 
5135 /* dcbf */
5136 static void gen_dcbf(DisasContext *ctx)
5137 {
5138     /* XXX: specification says this is treated as a load by the MMU */
5139     TCGv t0;
5140     gen_set_access_type(ctx, ACCESS_CACHE);
5141     t0 = tcg_temp_new();
5142     gen_addr_reg_index(ctx, t0);
5143     gen_qemu_ld8u(ctx, t0, t0);
5144     tcg_temp_free(t0);
5145 }
5146 
5147 /* dcbfep (external PID dcbf) */
5148 static void gen_dcbfep(DisasContext *ctx)
5149 {
5150     /* XXX: specification says this is treated as a load by the MMU */
5151     TCGv t0;
5152     CHK_SV(ctx);
5153     gen_set_access_type(ctx, ACCESS_CACHE);
5154     t0 = tcg_temp_new();
5155     gen_addr_reg_index(ctx, t0);
5156     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5157     tcg_temp_free(t0);
5158 }
5159 
5160 /* dcbi (Supervisor only) */
5161 static void gen_dcbi(DisasContext *ctx)
5162 {
5163 #if defined(CONFIG_USER_ONLY)
5164     GEN_PRIV(ctx);
5165 #else
5166     TCGv EA, val;
5167 
5168     CHK_SV(ctx);
5169     EA = tcg_temp_new();
5170     gen_set_access_type(ctx, ACCESS_CACHE);
5171     gen_addr_reg_index(ctx, EA);
5172     val = tcg_temp_new();
5173     /* XXX: specification says this should be treated as a store by the MMU */
5174     gen_qemu_ld8u(ctx, val, EA);
5175     gen_qemu_st8(ctx, val, EA);
5176     tcg_temp_free(val);
5177     tcg_temp_free(EA);
5178 #endif /* defined(CONFIG_USER_ONLY) */
5179 }
5180 
5181 /* dcbst */
5182 static void gen_dcbst(DisasContext *ctx)
5183 {
5184     /* XXX: specification says this is treated as a load by the MMU */
5185     TCGv t0;
5186     gen_set_access_type(ctx, ACCESS_CACHE);
5187     t0 = tcg_temp_new();
5188     gen_addr_reg_index(ctx, t0);
5189     gen_qemu_ld8u(ctx, t0, t0);
5190     tcg_temp_free(t0);
5191 }
5192 
5193 /* dcbstep (external PID dcbst) */
5194 static void gen_dcbstep(DisasContext *ctx)
5195 {
5196     /* XXX: specification says this is treated as a load by the MMU */
5197     TCGv t0;
5198     gen_set_access_type(ctx, ACCESS_CACHE);
5199     t0 = tcg_temp_new();
5200     gen_addr_reg_index(ctx, t0);
5201     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5202     tcg_temp_free(t0);
5203 }
5204 
5205 /* dcbt */
5206 static void gen_dcbt(DisasContext *ctx)
5207 {
5208     /*
5209      * interpreted as no-op
5210      * XXX: specification says this is treated as a load by the MMU but
5211      *      does not generate any exception
5212      */
5213 }
5214 
5215 /* dcbtep */
5216 static void gen_dcbtep(DisasContext *ctx)
5217 {
5218     /*
5219      * interpreted as no-op
5220      * XXX: specification says this is treated as a load by the MMU but
5221      *      does not generate any exception
5222      */
5223 }
5224 
5225 /* dcbtst */
5226 static void gen_dcbtst(DisasContext *ctx)
5227 {
5228     /*
5229      * interpreted as no-op
5230      * XXX: specification says this is treated as a load by the MMU but
5231      *      does not generate any exception
5232      */
5233 }
5234 
5235 /* dcbtstep */
5236 static void gen_dcbtstep(DisasContext *ctx)
5237 {
5238     /*
5239      * interpreted as no-op
5240      * XXX: specification says this is treated as a load by the MMU but
5241      *      does not generate any exception
5242      */
5243 }
5244 
5245 /* dcbtls */
5246 static void gen_dcbtls(DisasContext *ctx)
5247 {
5248     /* Always fails locking the cache */
5249     TCGv t0 = tcg_temp_new();
5250     gen_load_spr(t0, SPR_Exxx_L1CSR0);
5251     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5252     gen_store_spr(SPR_Exxx_L1CSR0, t0);
5253     tcg_temp_free(t0);
5254 }
5255 
5256 /* dcbz */
5257 static void gen_dcbz(DisasContext *ctx)
5258 {
5259     TCGv tcgv_addr;
5260     TCGv_i32 tcgv_op;
5261 
5262     gen_set_access_type(ctx, ACCESS_CACHE);
5263     tcgv_addr = tcg_temp_new();
5264     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5265     gen_addr_reg_index(ctx, tcgv_addr);
5266     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5267     tcg_temp_free(tcgv_addr);
5268     tcg_temp_free_i32(tcgv_op);
5269 }
5270 
5271 /* dcbzep */
5272 static void gen_dcbzep(DisasContext *ctx)
5273 {
5274     TCGv tcgv_addr;
5275     TCGv_i32 tcgv_op;
5276 
5277     gen_set_access_type(ctx, ACCESS_CACHE);
5278     tcgv_addr = tcg_temp_new();
5279     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5280     gen_addr_reg_index(ctx, tcgv_addr);
5281     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5282     tcg_temp_free(tcgv_addr);
5283     tcg_temp_free_i32(tcgv_op);
5284 }
5285 
5286 /* dst / dstt */
5287 static void gen_dst(DisasContext *ctx)
5288 {
5289     if (rA(ctx->opcode) == 0) {
5290         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5291     } else {
5292         /* interpreted as no-op */
5293     }
5294 }
5295 
5296 /* dstst / dststt */
5297 static void gen_dstst(DisasContext *ctx)
5298 {
5299     if (rA(ctx->opcode) == 0) {
5300         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5301     } else {
5302         /* interpreted as no-op */
5303     }
5305 }
5306 
5307 /* dss / dssall */
5308 static void gen_dss(DisasContext *ctx)
5309 {
5310     /* interpreted as no-op */
5311 }
5312 
5313 /* icbi */
5314 static void gen_icbi(DisasContext *ctx)
5315 {
5316     TCGv t0;
5317     gen_set_access_type(ctx, ACCESS_CACHE);
5318     t0 = tcg_temp_new();
5319     gen_addr_reg_index(ctx, t0);
5320     gen_helper_icbi(cpu_env, t0);
5321     tcg_temp_free(t0);
5322 }
5323 
5324 /* icbiep */
5325 static void gen_icbiep(DisasContext *ctx)
5326 {
5327     TCGv t0;
5328     gen_set_access_type(ctx, ACCESS_CACHE);
5329     t0 = tcg_temp_new();
5330     gen_addr_reg_index(ctx, t0);
5331     gen_helper_icbiep(cpu_env, t0);
5332     tcg_temp_free(t0);
5333 }
5334 
5335 /* Optional: */
5336 /* dcba */
5337 static void gen_dcba(DisasContext *ctx)
5338 {
5339     /*
5340      * interpreted as no-op
5341      * XXX: specification says this is treated as a store by the MMU
5342      *      but does not generate any exception
5343      */
5344 }
5345 
5346 /***                    Segment register manipulation                      ***/
5347 /* Supervisor only: */
5348 
5349 /* mfsr */
5350 static void gen_mfsr(DisasContext *ctx)
5351 {
5352 #if defined(CONFIG_USER_ONLY)
5353     GEN_PRIV(ctx);
5354 #else
5355     TCGv t0;
5356 
5357     CHK_SV(ctx);
5358     t0 = tcg_const_tl(SR(ctx->opcode));
5359     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5360     tcg_temp_free(t0);
5361 #endif /* defined(CONFIG_USER_ONLY) */
5362 }
5363 
5364 /* mfsrin */
5365 static void gen_mfsrin(DisasContext *ctx)
5366 {
5367 #if defined(CONFIG_USER_ONLY)
5368     GEN_PRIV(ctx);
5369 #else
5370     TCGv t0;
5371 
5372     CHK_SV(ctx);
5373     t0 = tcg_temp_new();
5374     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5375     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5376     tcg_temp_free(t0);
5377 #endif /* defined(CONFIG_USER_ONLY) */
5378 }
5379 
5380 /* mtsr */
5381 static void gen_mtsr(DisasContext *ctx)
5382 {
5383 #if defined(CONFIG_USER_ONLY)
5384     GEN_PRIV(ctx);
5385 #else
5386     TCGv t0;
5387 
5388     CHK_SV(ctx);
5389     t0 = tcg_const_tl(SR(ctx->opcode));
5390     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5391     tcg_temp_free(t0);
5392 #endif /* defined(CONFIG_USER_ONLY) */
5393 }
5394 
5395 /* mtsrin */
5396 static void gen_mtsrin(DisasContext *ctx)
5397 {
5398 #if defined(CONFIG_USER_ONLY)
5399     GEN_PRIV(ctx);
5400 #else
5401     TCGv t0;
5402     CHK_SV(ctx);
5403 
5404     t0 = tcg_temp_new();
5405     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5406     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5407     tcg_temp_free(t0);
5408 #endif /* defined(CONFIG_USER_ONLY) */
5409 }
5410 
5411 #if defined(TARGET_PPC64)
5412 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5413 
5414 /* mfsr */
5415 static void gen_mfsr_64b(DisasContext *ctx)
5416 {
5417 #if defined(CONFIG_USER_ONLY)
5418     GEN_PRIV(ctx);
5419 #else
5420     TCGv t0;
5421 
5422     CHK_SV(ctx);
5423     t0 = tcg_const_tl(SR(ctx->opcode));
5424     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5425     tcg_temp_free(t0);
5426 #endif /* defined(CONFIG_USER_ONLY) */
5427 }
5428 
5429 /* mfsrin */
5430 static void gen_mfsrin_64b(DisasContext *ctx)
5431 {
5432 #if defined(CONFIG_USER_ONLY)
5433     GEN_PRIV(ctx);
5434 #else
5435     TCGv t0;
5436 
5437     CHK_SV(ctx);
5438     t0 = tcg_temp_new();
5439     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5440     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5441     tcg_temp_free(t0);
5442 #endif /* defined(CONFIG_USER_ONLY) */
5443 }
5444 
5445 /* mtsr */
5446 static void gen_mtsr_64b(DisasContext *ctx)
5447 {
5448 #if defined(CONFIG_USER_ONLY)
5449     GEN_PRIV(ctx);
5450 #else
5451     TCGv t0;
5452 
5453     CHK_SV(ctx);
5454     t0 = tcg_const_tl(SR(ctx->opcode));
5455     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5456     tcg_temp_free(t0);
5457 #endif /* defined(CONFIG_USER_ONLY) */
5458 }
5459 
5460 /* mtsrin */
5461 static void gen_mtsrin_64b(DisasContext *ctx)
5462 {
5463 #if defined(CONFIG_USER_ONLY)
5464     GEN_PRIV(ctx);
5465 #else
5466     TCGv t0;
5467 
5468     CHK_SV(ctx);
5469     t0 = tcg_temp_new();
5470     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5471     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5472     tcg_temp_free(t0);
5473 #endif /* defined(CONFIG_USER_ONLY) */
5474 }
5475 
5476 #endif /* defined(TARGET_PPC64) */
5477 
5478 /***                      Lookaside buffer management                      ***/
5479 /* Optional & supervisor only: */
5480 
5481 /* tlbia */
5482 static void gen_tlbia(DisasContext *ctx)
5483 {
5484 #if defined(CONFIG_USER_ONLY)
5485     GEN_PRIV(ctx);
5486 #else
5487     CHK_HV(ctx);
5488 
5489     gen_helper_tlbia(cpu_env);
5490 #endif  /* defined(CONFIG_USER_ONLY) */
5491 }
5492 
5493 /* tlbsync */
5494 static void gen_tlbsync(DisasContext *ctx)
5495 {
5496 #if defined(CONFIG_USER_ONLY)
5497     GEN_PRIV(ctx);
5498 #else
5499 
5500     if (ctx->gtse) {
5501         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
5502     } else {
5503         CHK_HV(ctx); /* Else hypervisor privileged */
5504     }
5505 
5506     /* BookS flushes on ptesync as well, so tlbsync is a nop for server */
5507     if (ctx->insns_flags & PPC_BOOKE) {
5508         gen_check_tlb_flush(ctx, true);
5509     }
5510 #endif /* defined(CONFIG_USER_ONLY) */
5511 }
5512 
5513 /***                              External control                         ***/
5514 /* Optional: */
5515 
5516 /* eciwx */
5517 static void gen_eciwx(DisasContext *ctx)
5518 {
5519     TCGv t0;
5520     /* Should check EAR[E] ! */
5521     gen_set_access_type(ctx, ACCESS_EXT);
5522     t0 = tcg_temp_new();
5523     gen_addr_reg_index(ctx, t0);
5524     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5525                        DEF_MEMOP(MO_UL | MO_ALIGN));
5526     tcg_temp_free(t0);
5527 }
5528 
5529 /* ecowx */
5530 static void gen_ecowx(DisasContext *ctx)
5531 {
5532     TCGv t0;
5533     /* Should check EAR[E] ! */
5534     gen_set_access_type(ctx, ACCESS_EXT);
5535     t0 = tcg_temp_new();
5536     gen_addr_reg_index(ctx, t0);
5537     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5538                        DEF_MEMOP(MO_UL | MO_ALIGN));
5539     tcg_temp_free(t0);
5540 }
5541 
5542 /* 602 - 603 - G2 TLB management */
5543 
5544 /* tlbld */
5545 static void gen_tlbld_6xx(DisasContext *ctx)
5546 {
5547 #if defined(CONFIG_USER_ONLY)
5548     GEN_PRIV(ctx);
5549 #else
5550     CHK_SV(ctx);
5551     gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5552 #endif /* defined(CONFIG_USER_ONLY) */
5553 }
5554 
5555 /* tlbli */
5556 static void gen_tlbli_6xx(DisasContext *ctx)
5557 {
5558 #if defined(CONFIG_USER_ONLY)
5559     GEN_PRIV(ctx);
5560 #else
5561     CHK_SV(ctx);
5562     gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5563 #endif /* defined(CONFIG_USER_ONLY) */
5564 }
5565 
5566 /* BookE specific instructions */
5567 
5568 /* XXX: not implemented on 440 ? */
5569 static void gen_mfapidi(DisasContext *ctx)
5570 {
5571     /* XXX: TODO */
5572     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5573 }
5574 
5575 /* XXX: not implemented on 440 ? */
5576 static void gen_tlbiva(DisasContext *ctx)
5577 {
5578 #if defined(CONFIG_USER_ONLY)
5579     GEN_PRIV(ctx);
5580 #else
5581     TCGv t0;
5582 
5583     CHK_SV(ctx);
5584     t0 = tcg_temp_new();
5585     gen_addr_reg_index(ctx, t0);
5586     gen_helper_tlbiva(cpu_env, t0);
5587     tcg_temp_free(t0);
5588 #endif /* defined(CONFIG_USER_ONLY) */
5589 }
5590 
5591 /* All 405 MAC instructions are translated here */
5592 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5593                                         int ra, int rb, int rt, int Rc)
5594 {
5595     TCGv t0, t1;
5596 
5597     t0 = tcg_temp_local_new();
5598     t1 = tcg_temp_local_new();
5599 
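         /*
          * opc3 & 0x0D selects the operand halfwords and signedness:
          * bit 0 -> signed operands; bit 3 -> low halfwords of rA and rB;
          * otherwise bit 2 -> rA low halfword with rB high halfword;
          * neither -> high halfwords of both.
          */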
5600     switch (opc3 & 0x0D) {
5601     case 0x05:
5602         /* macchw    - macchw.    - macchwo   - macchwo.   */
5603         /* macchws   - macchws.   - macchwso  - macchwso.  */
5604         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5605         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5606         /* mulchw - mulchw. */
5607         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5608         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5609         tcg_gen_ext16s_tl(t1, t1);
5610         break;
5611     case 0x04:
5612         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5613         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5614         /* mulchwu - mulchwu. */
5615         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5616         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5617         tcg_gen_ext16u_tl(t1, t1);
5618         break;
5619     case 0x01:
5620         /* machhw    - machhw.    - machhwo   - machhwo.   */
5621         /* machhws   - machhws.   - machhwso  - machhwso.  */
5622         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5623         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5624         /* mulhhw - mulhhw. */
5625         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5626         tcg_gen_ext16s_tl(t0, t0);
5627         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5628         tcg_gen_ext16s_tl(t1, t1);
5629         break;
5630     case 0x00:
5631         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5632         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5633         /* mulhhwu - mulhhwu. */
5634         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5635         tcg_gen_ext16u_tl(t0, t0);
5636         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5637         tcg_gen_ext16u_tl(t1, t1);
5638         break;
5639     case 0x0D:
5640         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5641         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5642         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5643         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5644         /* mullhw - mullhw. */
5645         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5646         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5647         break;
5648     case 0x0C:
5649         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5650         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5651         /* mullhwu - mullhwu. */
5652         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5653         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5654         break;
5655     }
5656     if (opc2 & 0x04) {
5657         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5658         tcg_gen_mul_tl(t1, t0, t1);
5659         if (opc2 & 0x02) {
5660             /* nmultiply-and-accumulate (0x0E) */
5661             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5662         } else {
5663             /* multiply-and-accumulate (0x0C) */
5664             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5665         }
5666 
5667         if (opc3 & 0x12) {
5668             /* Check overflow and/or saturate */
5669             TCGLabel *l1 = gen_new_label();
5670 
5671             if (opc3 & 0x10) {
5672                 /* Start with XER OV disabled, the most likely case */
5673                 tcg_gen_movi_tl(cpu_ov, 0);
5674             }
5675             if (opc3 & 0x01) {
5676                 /* Signed */
5677                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5678                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5679                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5680                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5681                 if (opc3 & 0x02) {
5682                     /* Saturate */
5683                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5684                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5685                 }
5686             } else {
5687                 /* Unsigned */
5688                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5689                 if (opc3 & 0x02) {
5690                     /* Saturate */
5691                     tcg_gen_movi_tl(t0, UINT32_MAX);
5692                 }
5693             }
5694             if (opc3 & 0x10) {
5695                 /* Check overflow */
5696                 tcg_gen_movi_tl(cpu_ov, 1);
5697                 tcg_gen_movi_tl(cpu_so, 1);
5698             }
5699             gen_set_label(l1);
5700             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5701         }
5702     } else {
5703         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5704     }
5705     tcg_temp_free(t0);
5706     tcg_temp_free(t1);
5707     if (unlikely(Rc != 0)) {
5708         /* Update Rc0 */
5709         gen_set_Rc0(ctx, cpu_gpr[rt]);
5710     }
5711 }
5712 
5713 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
5714 static void glue(gen_, name)(DisasContext *ctx)                               \
5715 {                                                                             \
5716     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
5717                          rD(ctx->opcode), Rc(ctx->opcode));                   \
5718 }
5719 
5720 /* macchw    - macchw.    */
5721 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
5722 /* macchwo   - macchwo.   */
5723 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
5724 /* macchws   - macchws.   */
5725 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
5726 /* macchwso  - macchwso.  */
5727 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
5728 /* macchwsu  - macchwsu.  */
5729 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
5730 /* macchwsuo - macchwsuo. */
5731 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
5732 /* macchwu   - macchwu.   */
5733 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
5734 /* macchwuo  - macchwuo.  */
5735 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
5736 /* machhw    - machhw.    */
5737 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
5738 /* machhwo   - machhwo.   */
5739 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
5740 /* machhws   - machhws.   */
5741 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
5742 /* machhwso  - machhwso.  */
5743 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
5744 /* machhwsu  - machhwsu.  */
5745 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
5746 /* machhwsuo - machhwsuo. */
5747 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
5748 /* machhwu   - machhwu.   */
5749 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
5750 /* machhwuo  - machhwuo.  */
5751 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
5752 /* maclhw    - maclhw.    */
5753 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
5754 /* maclhwo   - maclhwo.   */
5755 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
5756 /* maclhws   - maclhws.   */
5757 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
5758 /* maclhwso  - maclhwso.  */
5759 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
5760 /* maclhwu   - maclhwu.   */
5761 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
5762 /* maclhwuo  - maclhwuo.  */
5763 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
5764 /* maclhwsu  - maclhwsu.  */
5765 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
5766 /* maclhwsuo - maclhwsuo. */
5767 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
5768 /* nmacchw   - nmacchw.   */
5769 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5770 /* nmacchwo  - nmacchwo.  */
5771 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5772 /* nmacchws  - nmacchws.  */
5773 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5774 /* nmacchwso - nmacchwso. */
5775 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5776 /* nmachhw   - nmachhw.   */
5777 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5778 /* nmachhwo  - nmachhwo.  */
5779 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5780 /* nmachhws  - nmachhws.  */
5781 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5782 /* nmachhwso - nmachhwso. */
5783 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5784 /* nmaclhw   - nmaclhw.   */
5785 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5786 /* nmaclhwo  - nmaclhwo.  */
5787 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5788 /* nmaclhws  - nmaclhws.  */
5789 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5790 /* nmaclhwso - nmaclhwso. */
5791 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5792 
5793 /* mulchw  - mulchw.  */
5794 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5795 /* mulchwu - mulchwu. */
5796 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5797 /* mulhhw  - mulhhw.  */
5798 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5799 /* mulhhwu - mulhhwu. */
5800 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5801 /* mullhw  - mullhw.  */
5802 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5803 /* mullhwu - mullhwu. */
5804 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5805 
5806 /* mfdcr */
5807 static void gen_mfdcr(DisasContext *ctx)
5808 {
5809 #if defined(CONFIG_USER_ONLY)
5810     GEN_PRIV(ctx);
5811 #else
5812     TCGv dcrn;
5813 
5814     CHK_SV(ctx);
5815     dcrn = tcg_const_tl(SPR(ctx->opcode));
5816     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5817     tcg_temp_free(dcrn);
5818 #endif /* defined(CONFIG_USER_ONLY) */
5819 }
5820 
5821 /* mtdcr */
5822 static void gen_mtdcr(DisasContext *ctx)
5823 {
5824 #if defined(CONFIG_USER_ONLY)
5825     GEN_PRIV(ctx);
5826 #else
5827     TCGv dcrn;
5828 
5829     CHK_SV(ctx);
5830     dcrn = tcg_const_tl(SPR(ctx->opcode));
5831     gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5832     tcg_temp_free(dcrn);
5833 #endif /* defined(CONFIG_USER_ONLY) */
5834 }
5835 
5836 /* mfdcrx */
5837 /* XXX: not implemented on 440 ? */
5838 static void gen_mfdcrx(DisasContext *ctx)
5839 {
5840 #if defined(CONFIG_USER_ONLY)
5841     GEN_PRIV(ctx);
5842 #else
5843     CHK_SV(ctx);
5844     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
5845                         cpu_gpr[rA(ctx->opcode)]);
5846     /* Note: Rc update flag set leads to undefined state of Rc0 */
5847 #endif /* defined(CONFIG_USER_ONLY) */
5848 }
5849 
5850 /* mtdcrx */
5851 /* XXX: not implemented on 440 ? */
5852 static void gen_mtdcrx(DisasContext *ctx)
5853 {
5854 #if defined(CONFIG_USER_ONLY)
5855     GEN_PRIV(ctx);
5856 #else
5857     CHK_SV(ctx);
5858     gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
5859                          cpu_gpr[rS(ctx->opcode)]);
5860     /* Note: Rc update flag set leads to undefined state of Rc0 */
5861 #endif /* defined(CONFIG_USER_ONLY) */
5862 }
5863 
5864 /* dccci */
5865 static void gen_dccci(DisasContext *ctx)
5866 {
5867     CHK_SV(ctx);
5868     /* interpreted as no-op */
5869 }
5870 
5871 /* dcread */
5872 static void gen_dcread(DisasContext *ctx)
5873 {
5874 #if defined(CONFIG_USER_ONLY)
5875     GEN_PRIV(ctx);
5876 #else
5877     TCGv EA, val;
5878 
5879     CHK_SV(ctx);
5880     gen_set_access_type(ctx, ACCESS_CACHE);
5881     EA = tcg_temp_new();
5882     gen_addr_reg_index(ctx, EA);
5883     val = tcg_temp_new();
5884     gen_qemu_ld32u(ctx, val, EA);
5885     tcg_temp_free(val);
5886     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5887     tcg_temp_free(EA);
5888 #endif /* defined(CONFIG_USER_ONLY) */
5889 }
5890 
5891 /* icbt */
5892 static void gen_icbt_40x(DisasContext *ctx)
5893 {
5894     /*
5895      * interpreted as no-op
5896      * XXX: specification says this is treated as a load by the MMU but
5897      *      does not generate any exception
5898      */
5899 }
5900 
5901 /* iccci */
5902 static void gen_iccci(DisasContext *ctx)
5903 {
5904     CHK_SV(ctx);
5905     /* interpreted as no-op */
5906 }
5907 
5908 /* icread */
5909 static void gen_icread(DisasContext *ctx)
5910 {
5911     CHK_SV(ctx);
5912     /* interpreted as no-op */
5913 }
5914 
5915 /* rfci (supervisor only) */
5916 static void gen_rfci_40x(DisasContext *ctx)
5917 {
5918 #if defined(CONFIG_USER_ONLY)
5919     GEN_PRIV(ctx);
5920 #else
5921     CHK_SV(ctx);
5922     /* Restore CPU state */
5923     gen_helper_40x_rfci(cpu_env);
5924     ctx->base.is_jmp = DISAS_EXIT;
5925 #endif /* defined(CONFIG_USER_ONLY) */
5926 }
5927 
5928 static void gen_rfci(DisasContext *ctx)
5929 {
5930 #if defined(CONFIG_USER_ONLY)
5931     GEN_PRIV(ctx);
5932 #else
5933     CHK_SV(ctx);
5934     /* Restore CPU state */
5935     gen_helper_rfci(cpu_env);
5936     ctx->base.is_jmp = DISAS_EXIT;
5937 #endif /* defined(CONFIG_USER_ONLY) */
5938 }
5939 
5940 /* BookE specific */
5941 
5942 /* XXX: not implemented on 440 ? */
5943 static void gen_rfdi(DisasContext *ctx)
5944 {
5945 #if defined(CONFIG_USER_ONLY)
5946     GEN_PRIV(ctx);
5947 #else
5948     CHK_SV(ctx);
5949     /* Restore CPU state */
5950     gen_helper_rfdi(cpu_env);
5951     ctx->base.is_jmp = DISAS_EXIT;
5952 #endif /* defined(CONFIG_USER_ONLY) */
5953 }
5954 
5955 /* XXX: not implemented on 440 ? */
5956 static void gen_rfmci(DisasContext *ctx)
5957 {
5958 #if defined(CONFIG_USER_ONLY)
5959     GEN_PRIV(ctx);
5960 #else
5961     CHK_SV(ctx);
5962     /* Restore CPU state */
5963     gen_helper_rfmci(cpu_env);
5964     ctx->base.is_jmp = DISAS_EXIT;
5965 #endif /* defined(CONFIG_USER_ONLY) */
5966 }
5967 
5968 /* TLB management - PowerPC 405 implementation */
5969 
5970 /* tlbre */
5971 static void gen_tlbre_40x(DisasContext *ctx)
5972 {
5973 #if defined(CONFIG_USER_ONLY)
5974     GEN_PRIV(ctx);
5975 #else
5976     CHK_SV(ctx);
5977     switch (rB(ctx->opcode)) {
5978     case 0:
5979         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
5980                                 cpu_gpr[rA(ctx->opcode)]);
5981         break;
5982     case 1:
5983         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
5984                                 cpu_gpr[rA(ctx->opcode)]);
5985         break;
5986     default:
5987         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5988         break;
5989     }
5990 #endif /* defined(CONFIG_USER_ONLY) */
5991 }
5992 
5993 /* tlbsx - tlbsx. */
5994 static void gen_tlbsx_40x(DisasContext *ctx)
5995 {
5996 #if defined(CONFIG_USER_ONLY)
5997     GEN_PRIV(ctx);
5998 #else
5999     TCGv t0;
6000 
6001     CHK_SV(ctx);
6002     t0 = tcg_temp_new();
6003     gen_addr_reg_index(ctx, t0);
6004     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6005     tcg_temp_free(t0);
6006     if (Rc(ctx->opcode)) {
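             /* CR0[SO] = XER[SO]; CR0[EQ] set if the search hit (rD != -1) */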
6007         TCGLabel *l1 = gen_new_label();
6008         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6009         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6010         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6011         gen_set_label(l1);
6012     }
6013 #endif /* defined(CONFIG_USER_ONLY) */
6014 }
6015 
6016 /* tlbwe */
6017 static void gen_tlbwe_40x(DisasContext *ctx)
6018 {
6019 #if defined(CONFIG_USER_ONLY)
6020     GEN_PRIV(ctx);
6021 #else
6022     CHK_SV(ctx);
6023 
6024     switch (rB(ctx->opcode)) {
6025     case 0:
6026         gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
6027                                 cpu_gpr[rS(ctx->opcode)]);
6028         break;
6029     case 1:
6030         gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
6031                                 cpu_gpr[rS(ctx->opcode)]);
6032         break;
6033     default:
6034         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6035         break;
6036     }
6037 #endif /* defined(CONFIG_USER_ONLY) */
6038 }
6039 
6040 /* TLB management - PowerPC 440 implementation */
6041 
6042 /* tlbre */
6043 static void gen_tlbre_440(DisasContext *ctx)
6044 {
6045 #if defined(CONFIG_USER_ONLY)
6046     GEN_PRIV(ctx);
6047 #else
6048     CHK_SV(ctx);
6049 
6050     switch (rB(ctx->opcode)) {
6051     case 0:
6052     case 1:
6053     case 2:
6054         {
6055             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6056             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6057                                  t0, cpu_gpr[rA(ctx->opcode)]);
6058             tcg_temp_free_i32(t0);
6059         }
6060         break;
6061     default:
6062         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6063         break;
6064     }
6065 #endif /* defined(CONFIG_USER_ONLY) */
6066 }
6067 
6068 /* tlbsx - tlbsx. */
6069 static void gen_tlbsx_440(DisasContext *ctx)
6070 {
6071 #if defined(CONFIG_USER_ONLY)
6072     GEN_PRIV(ctx);
6073 #else
6074     TCGv t0;
6075 
6076     CHK_SV(ctx);
6077     t0 = tcg_temp_new();
6078     gen_addr_reg_index(ctx, t0);
6079     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
6080     tcg_temp_free(t0);
6081     if (Rc(ctx->opcode)) {
6082         TCGLabel *l1 = gen_new_label();
6083         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
6084         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
6085         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
6086         gen_set_label(l1);
6087     }
6088 #endif /* defined(CONFIG_USER_ONLY) */
6089 }
6090 
6091 /* tlbwe */
6092 static void gen_tlbwe_440(DisasContext *ctx)
6093 {
6094 #if defined(CONFIG_USER_ONLY)
6095     GEN_PRIV(ctx);
6096 #else
6097     CHK_SV(ctx);
6098     switch (rB(ctx->opcode)) {
6099     case 0:
6100     case 1:
6101     case 2:
6102         {
6103             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6104             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6105                                  cpu_gpr[rS(ctx->opcode)]);
6106             tcg_temp_free_i32(t0);
6107         }
6108         break;
6109     default:
6110         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6111         break;
6112     }
6113 #endif /* defined(CONFIG_USER_ONLY) */
6114 }
6115 
6116 /* TLB management - PowerPC BookE 2.06 implementation */
6117 
6118 /* tlbre */
6119 static void gen_tlbre_booke206(DisasContext *ctx)
6120 {
6121 #if defined(CONFIG_USER_ONLY)
6122     GEN_PRIV(ctx);
6123 #else
6124     CHK_SV(ctx);
6125     gen_helper_booke206_tlbre(cpu_env);
6126 #endif /* defined(CONFIG_USER_ONLY) */
6127 }
6128 
6129 /* tlbsx - tlbsx. */
6130 static void gen_tlbsx_booke206(DisasContext *ctx)
6131 {
6132 #if defined(CONFIG_USER_ONLY)
6133     GEN_PRIV(ctx);
6134 #else
6135     TCGv t0;
6136 
6137     CHK_SV(ctx);
6138     if (rA(ctx->opcode)) {
6139         t0 = tcg_temp_new();
6140         tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6141     } else {
6142         t0 = tcg_const_tl(0);
6143     }
6144 
6145     tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6146     gen_helper_booke206_tlbsx(cpu_env, t0);
6147     tcg_temp_free(t0);
6148 #endif /* defined(CONFIG_USER_ONLY) */
6149 }
6150 
6151 /* tlbwe */
6152 static void gen_tlbwe_booke206(DisasContext *ctx)
6153 {
6154 #if defined(CONFIG_USER_ONLY)
6155     GEN_PRIV(ctx);
6156 #else
6157     CHK_SV(ctx);
6158     gen_helper_booke206_tlbwe(cpu_env);
6159 #endif /* defined(CONFIG_USER_ONLY) */
6160 }
6161 
6162 static void gen_tlbivax_booke206(DisasContext *ctx)
6163 {
6164 #if defined(CONFIG_USER_ONLY)
6165     GEN_PRIV(ctx);
6166 #else
6167     TCGv t0;
6168 
6169     CHK_SV(ctx);
6170     t0 = tcg_temp_new();
6171     gen_addr_reg_index(ctx, t0);
6172     gen_helper_booke206_tlbivax(cpu_env, t0);
6173     tcg_temp_free(t0);
6174 #endif /* defined(CONFIG_USER_ONLY) */
6175 }
6176 
6177 static void gen_tlbilx_booke206(DisasContext *ctx)
6178 {
6179 #if defined(CONFIG_USER_ONLY)
6180     GEN_PRIV(ctx);
6181 #else
6182     TCGv t0;
6183 
6184     CHK_SV(ctx);
6185     t0 = tcg_temp_new();
6186     gen_addr_reg_index(ctx, t0);
6187 
6188     switch ((ctx->opcode >> 21) & 0x3) {
6189     case 0:
6190         gen_helper_booke206_tlbilx0(cpu_env, t0);
6191         break;
6192     case 1:
6193         gen_helper_booke206_tlbilx1(cpu_env, t0);
6194         break;
6195     case 3:
6196         gen_helper_booke206_tlbilx3(cpu_env, t0);
6197         break;
6198     default:
6199         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6200         break;
6201     }
6202 
6203     tcg_temp_free(t0);
6204 #endif /* defined(CONFIG_USER_ONLY) */
6205 }
6206 
6207 /* wrtee */
6208 static void gen_wrtee(DisasContext *ctx)
6209 {
6210 #if defined(CONFIG_USER_ONLY)
6211     GEN_PRIV(ctx);
6212 #else
6213     TCGv t0;
6214 
6215     CHK_SV(ctx);
6216     t0 = tcg_temp_new();
6217     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
6218     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6219     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
6220     gen_ppc_maybe_interrupt(ctx);
6221     tcg_temp_free(t0);
6222     /*
6223      * Stop translation to have a chance to raise an exception if we
6224      * just set msr_ee to 1
6225      */
6226     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6227 #endif /* defined(CONFIG_USER_ONLY) */
6228 }
6229 
6230 /* wrteei */
6231 static void gen_wrteei(DisasContext *ctx)
6232 {
6233 #if defined(CONFIG_USER_ONLY)
6234     GEN_PRIV(ctx);
6235 #else
6236     CHK_SV(ctx);
6237     if (ctx->opcode & 0x00008000) {
6238         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
6239         gen_ppc_maybe_interrupt(ctx);
6240         /* Stop translation to have a chance to raise an exception */
6241         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
6242     } else {
6243         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
6244     }
6245 #endif /* defined(CONFIG_USER_ONLY) */
6246 }
6247 
6248 /* PowerPC 440 specific instructions */
6249 
6250 /* dlmzb */
6251 static void gen_dlmzb(DisasContext *ctx)
6252 {
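         /*
          * dlmzb: Determine Leftmost Zero Byte of rS:rB; the Rc flag is
          * passed to the helper so it can also handle the record form.
          */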
6253     TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6254     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6255                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6256     tcg_temp_free_i32(t0);
6257 }
6258 
6259 /* mbar replaces eieio on 440 */
6260 static void gen_mbar(DisasContext *ctx)
6261 {
6262     /* interpreted as no-op */
6263 }
6264 
6265 /* msync replaces sync on 440 */
6266 static void gen_msync_4xx(DisasContext *ctx)
6267 {
6268     /* Only e500 seems to treat reserved bits as invalid */
6269     if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
6270         (ctx->opcode & 0x03FFF801)) {
6271         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6272     }
6273     /* otherwise interpreted as no-op */
6274 }
6275 
6276 /* icbt */
6277 static void gen_icbt_440(DisasContext *ctx)
6278 {
6279     /*
6280      * interpreted as no-op
6281      * XXX: specification says this is treated as a load by the MMU but
6282      *      does not generate any exception
6283      */
6284 }
6285 
6286 #if defined(TARGET_PPC64)
6287 static void gen_maddld(DisasContext *ctx)
6288 {
6289     TCGv_i64 t1 = tcg_temp_new_i64();
6290 
6291     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6292     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6293     tcg_temp_free_i64(t1);
6294 }
6295 
6296 /* maddhd maddhdu */
6297 static void gen_maddhd_maddhdu(DisasContext *ctx)
6298 {
6299     TCGv_i64 lo = tcg_temp_new_i64();
6300     TCGv_i64 hi = tcg_temp_new_i64();
6301     TCGv_i64 t1 = tcg_temp_new_i64();
6302 
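         /*
          * The opcode bit in the Rc position distinguishes maddhdu (set,
          * unsigned) from maddhd (clear, signed); rC is zero- or
          * sign-extended to 128 bits and rD gets the high 64 bits of the sum.
          */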
6303     if (Rc(ctx->opcode)) {
6304         tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6305                           cpu_gpr[rB(ctx->opcode)]);
6306         tcg_gen_movi_i64(t1, 0);
6307     } else {
6308         tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
6309                           cpu_gpr[rB(ctx->opcode)]);
6310         tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
6311     }
6312     tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
6313                      cpu_gpr[rC(ctx->opcode)], t1);
6314     tcg_temp_free_i64(lo);
6315     tcg_temp_free_i64(hi);
6316     tcg_temp_free_i64(t1);
6317 }
6318 #endif /* defined(TARGET_PPC64) */
6319 
6320 static void gen_tbegin(DisasContext *ctx)
6321 {
6322     if (unlikely(!ctx->tm_enabled)) {
6323         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6324         return;
6325     }
6326     gen_helper_tbegin(cpu_env);
6327 }
6328 
6329 #define GEN_TM_NOOP(name)                                      \
6330 static inline void gen_##name(DisasContext *ctx)               \
6331 {                                                              \
6332     if (unlikely(!ctx->tm_enabled)) {                          \
6333         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6334         return;                                                \
6335     }                                                          \
6336     /*                                                         \
6337      * Because tbegin always fails in QEMU, these user         \
6338      * space instructions all have a simple implementation:    \
6339      *                                                         \
6340      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
6341      *           = 0b0 || 0b00    || 0b0                       \
6342      */                                                        \
6343     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6344 }
6345 
6346 GEN_TM_NOOP(tend);
6347 GEN_TM_NOOP(tabort);
6348 GEN_TM_NOOP(tabortwc);
6349 GEN_TM_NOOP(tabortwci);
6350 GEN_TM_NOOP(tabortdc);
6351 GEN_TM_NOOP(tabortdci);
6352 GEN_TM_NOOP(tsr);
6353 
6354 static inline void gen_cp_abort(DisasContext *ctx)
6355 {
6356     /* Do Nothing */
6357 }
6358 
6359 #define GEN_CP_PASTE_NOOP(name)                           \
6360 static inline void gen_##name(DisasContext *ctx)          \
6361 {                                                         \
6362     /*                                                    \
6363      * Generate invalid exception until we have an        \
6364      * implementation of the copy paste facility          \
6365      */                                                   \
6366     gen_invalid(ctx);                                     \
6367 }
6368 
6369 GEN_CP_PASTE_NOOP(copy)
6370 GEN_CP_PASTE_NOOP(paste)
6371 
6372 static void gen_tcheck(DisasContext *ctx)
6373 {
6374     if (unlikely(!ctx->tm_enabled)) {
6375         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6376         return;
6377     }
6378     /*
6379      * Because tbegin always fails, the tcheck implementation is
6380      * simple:
6381      *
6382      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
6383      *         = 0b1 || 0b00 || 0b0
6384      */
6385     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
6386 }
6387 
6388 #if defined(CONFIG_USER_ONLY)
6389 #define GEN_TM_PRIV_NOOP(name)                                 \
6390 static inline void gen_##name(DisasContext *ctx)               \
6391 {                                                              \
6392     gen_priv_opc(ctx);                                         \
6393 }
6394 
6395 #else
6396 
6397 #define GEN_TM_PRIV_NOOP(name)                                 \
6398 static inline void gen_##name(DisasContext *ctx)               \
6399 {                                                              \
6400     CHK_SV(ctx);                                               \
6401     if (unlikely(!ctx->tm_enabled)) {                          \
6402         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
6403         return;                                                \
6404     }                                                          \
6405     /*                                                         \
6406      * Because tbegin always fails, the implementation is      \
6407      * simple:                                                 \
6408      *                                                         \
6409      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
6410      *         = 0b0 || 0b00 || 0b0                            \
6411      */                                                        \
6412     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
6413 }
6414 
6415 #endif
6416 
6417 GEN_TM_PRIV_NOOP(treclaim);
6418 GEN_TM_PRIV_NOOP(trechkpt);
6419 
6420 static inline void get_fpr(TCGv_i64 dst, int regno)
6421 {
6422     tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
6423 }
6424 
6425 static inline void set_fpr(int regno, TCGv_i64 src)
6426 {
6427     tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
6428     /*
6429      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
6430      * corresponding to the target FPR was undefined. However,
6431      * most (if not all) real hardware set the result to 0.
6432      * Starting at ISA v3.1, the result for doubleword 1 is now defined
6433      * to be 0.
6434      */
6435     tcg_gen_st_i64(tcg_constant_i64(0), cpu_env, vsr64_offset(regno, false));
6436 }
6437 
6438 static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
6439 {
6440     tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
6441 }
6442 
6443 static inline void set_avr64(int regno, TCGv_i64 src, bool high)
6444 {
6445     tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
6446 }
6447 
6448 /*
6449  * Helpers for decodetree used by !function for decoding arguments.
6450  */
6451 static int times_2(DisasContext *ctx, int x)
6452 {
6453     return x * 2;
6454 }
6455 
6456 static int times_4(DisasContext *ctx, int x)
6457 {
6458     return x * 4;
6459 }
6460 
6461 static int times_16(DisasContext *ctx, int x)
6462 {
6463     return x * 16;
6464 }
6465 
6466 static int64_t dw_compose_ea(DisasContext *ctx, int x)
6467 {
6468     return deposit64(0xfffffffffffffe00, 3, 6, x);
6469 }
6470 
6471 /*
6472  * Helpers for trans_* functions to check for specific insns flags.
6473  * Use token pasting to ensure that we use the proper flag with the
6474  * proper variable.
6475  */
6476 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
6477     do {                                                \
6478         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
6479             return false;                               \
6480         }                                               \
6481     } while (0)
6482 
6483 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
6484     do {                                                \
6485         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
6486             return false;                               \
6487         }                                               \
6488     } while (0)
6489 
6490 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
6491 #if TARGET_LONG_BITS == 32
6492 # define REQUIRE_64BIT(CTX)  return false
6493 #else
6494 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
6495 #endif
6496 
6497 #define REQUIRE_VECTOR(CTX)                             \
6498     do {                                                \
6499         if (unlikely(!(CTX)->altivec_enabled)) {        \
6500             gen_exception((CTX), POWERPC_EXCP_VPU);     \
6501             return true;                                \
6502         }                                               \
6503     } while (0)
6504 
6505 #define REQUIRE_VSX(CTX)                                \
6506     do {                                                \
6507         if (unlikely(!(CTX)->vsx_enabled)) {            \
6508             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
6509             return true;                                \
6510         }                                               \
6511     } while (0)
6512 
6513 #define REQUIRE_FPU(ctx)                                \
6514     do {                                                \
6515         if (unlikely(!(ctx)->fpu_enabled)) {            \
6516             gen_exception((ctx), POWERPC_EXCP_FPU);     \
6517             return true;                                \
6518         }                                               \
6519     } while (0)
6520 
6521 #if !defined(CONFIG_USER_ONLY)
6522 #define REQUIRE_SV(CTX)             \
6523     do {                            \
6524         if (unlikely((CTX)->pr)) {  \
6525             gen_priv_opc(CTX);      \
6526             return true;            \
6527         }                           \
6528     } while (0)
6529 
6530 #define REQUIRE_HV(CTX)                             \
6531     do {                                            \
6532         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
6533             gen_priv_opc(CTX);                      \
6534             return true;                            \
6535         }                                           \
6536     } while (0)
6537 #else
6538 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6539 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
6540 #endif
6541 
6542 /*
6543  * Helpers for implementing sets of trans_* functions.
6544  * Defer the implementation of NAME to FUNC, with optional extra arguments.
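 *
 * For example (with a hypothetical insn FOO), TRANS_FLAGS(ALTIVEC, FOO,
 * do_foo, 1) defines a trans_FOO() that first requires PPC_ALTIVEC in
 * ctx->insns_flags and then defers to do_foo(ctx, a, 1).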
6545  */
6546 #define TRANS(NAME, FUNC, ...) \
6547     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6548     { return FUNC(ctx, a, __VA_ARGS__); }
6549 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
6550     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6551     {                                                          \
6552         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
6553         return FUNC(ctx, a, __VA_ARGS__);                      \
6554     }
6555 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6556     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6557     {                                                          \
6558         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6559         return FUNC(ctx, a, __VA_ARGS__);                      \
6560     }
6561 
6562 #define TRANS64(NAME, FUNC, ...) \
6563     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6564     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
6565 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
6566     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
6567     {                                                          \
6568         REQUIRE_64BIT(ctx);                                    \
6569         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
6570         return FUNC(ctx, a, __VA_ARGS__);                      \
6571     }
6572 
6573 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6574 
6575 
6576 #include "decode-insn32.c.inc"
6577 #include "decode-insn64.c.inc"
6578 #include "power8-pmu-regs.c.inc"
6579 
6580 /*
6581  * Incorporate CIA into the constant when R=1.
6582  * Validate that when R=1, RA=0.
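 *
 * For example, a prefixed form with R=1 and si=0x10 executed at address
 * 0x2000 ends up with si=0x2010, i.e. the displacement is taken relative
 * to the current instruction address, and encoding RA != 0 is invalid.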
6583  */
6584 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6585 {
6586     d->rt = a->rt;
6587     d->ra = a->ra;
6588     d->si = a->si;
6589     if (a->r) {
6590         if (unlikely(a->ra != 0)) {
6591             gen_invalid(ctx);
6592             return false;
6593         }
6594         d->si += ctx->cia;
6595     }
6596     return true;
6597 }
6598 
6599 #include "translate/fixedpoint-impl.c.inc"
6600 
6601 #include "translate/fp-impl.c.inc"
6602 
6603 #include "translate/vmx-impl.c.inc"
6604 
6605 #include "translate/vsx-impl.c.inc"
6606 
6607 #include "translate/dfp-impl.c.inc"
6608 
6609 #include "translate/spe-impl.c.inc"
6610 
6611 #include "translate/branch-impl.c.inc"
6612 
6613 #include "translate/processor-ctrl-impl.c.inc"
6614 
6615 #include "translate/storage-ctrl-impl.c.inc"
6616 
6617 /* Handles lfdp */
6618 static void gen_dform39(DisasContext *ctx)
6619 {
6620     if ((ctx->opcode & 0x3) == 0) {
6621         if (ctx->insns_flags2 & PPC2_ISA205) {
6622             return gen_lfdp(ctx);
6623         }
6624     }
6625     return gen_invalid(ctx);
6626 }
6627 
6628 /* Handles stfdp */
6629 static void gen_dform3D(DisasContext *ctx)
6630 {
6631     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6632         /* stfdp */
6633         if (ctx->insns_flags2 & PPC2_ISA205) {
6634             return gen_stfdp(ctx);
6635         }
6636     }
6637     return gen_invalid(ctx);
6638 }
6639 
6640 #if defined(TARGET_PPC64)
6641 /* brd */
6642 static void gen_brd(DisasContext *ctx)
6643 {
6644     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6645 }
6646 
6647 /* brw */
6648 static void gen_brw(DisasContext *ctx)
6649 {
6650     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6651     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6652 
6653 }
6654 
6655 /* brh */
6656 static void gen_brh(DisasContext *ctx)
6657 {
6658     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6659     TCGv_i64 t1 = tcg_temp_new_i64();
6660     TCGv_i64 t2 = tcg_temp_new_i64();
6661 
6662     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6663     tcg_gen_and_i64(t2, t1, mask);
6664     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6665     tcg_gen_shli_i64(t1, t1, 8);
6666     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6667 
6668     tcg_temp_free_i64(t1);
6669     tcg_temp_free_i64(t2);
6670 }
6671 #endif
6672 
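/*
 * Legacy decoder table.  Each entry records the handler, the
 * opc1/opc2/opc3 (and, where used, opc4) values that select it, with
 * 0xFF marking an unused level, a mask of bits that must be zero in a
 * valid encoding, and the PPC_*/PPC2_* feature flags a CPU must
 * advertise for the entry to be registered at all.
 */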
6673 static opcode_t opcodes[] = {
6674 #if defined(TARGET_PPC64)
6675 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6676 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6677 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6678 #endif
6679 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6680 #if defined(TARGET_PPC64)
6681 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6682 #endif
6683 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6684 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6685 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6686 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6687 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6688 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6689 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6690 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6691 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6692 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6693 #if defined(TARGET_PPC64)
6694 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6695 #endif
6696 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6697 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6698 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6699 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6700 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6701 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6702 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6703 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6704 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6705 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6706 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6707 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6708 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6709 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6710 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6711 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6712 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6713 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6714 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6715 #if defined(TARGET_PPC64)
6716 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6717 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6718 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6719 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6720 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6721 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6722 #endif
6723 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6724 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6725 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6726 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6727 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6728 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6729 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6730 #if defined(TARGET_PPC64)
6731 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6732 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6733 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6734 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6735 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6736 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6737                PPC_NONE, PPC2_ISA300),
6738 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6739                PPC_NONE, PPC2_ISA300),
6740 #endif
6741 /* handles lfdp (lxsd and lxssp are handled by decodetree) */
6742 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6743 /* handles stfdp (stxsd and stxssp are handled by decodetree) */
6744 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6745 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6746 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6747 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6748 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6749 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6750 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6751 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6752 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6753 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6754 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6755 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6756 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6757 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6758 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6759 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6760 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6761 #if defined(TARGET_PPC64)
6762 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6763 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6764 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6765 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6766 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6767 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6768 #endif
6769 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6770 /* ISA v3.0 changed the extended opcode from 62 to 30 */
6771 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
6772 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
6773 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6774 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6775 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6776 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6777 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6778 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6779 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6780 #if defined(TARGET_PPC64)
6781 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6782 #if !defined(CONFIG_USER_ONLY)
6783 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6784 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6785 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6786 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6787 #endif
6788 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6789 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6790 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6791 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6792 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6793 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6794 #endif
6795 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6796 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6797 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
6798 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6799 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6800 #if defined(TARGET_PPC64)
6801 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6802 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6803 #endif
6804 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6805 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6806 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6807 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6808 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6809 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6810 #if defined(TARGET_PPC64)
6811 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6812 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6813 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6814 #endif
6815 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6816 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6817 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6818 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6819 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6820 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6821 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6822 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6823 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6824 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6825 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6826 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6827 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6828 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6829 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6830 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6831 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6832 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6833 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6834 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6835 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6836 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6837 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6838 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6839 #if defined(TARGET_PPC64)
6840 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6841 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6842              PPC_SEGMENT_64B),
6843 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6844 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6845              PPC_SEGMENT_64B),
6846 #endif
6847 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6848 /*
6849  * XXX Those instructions will need to be handled differently for
6850  * different ISA versions
6851  */
6852 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6853 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6854 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6855 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6856 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6857 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6858 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6859 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6860 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6861 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6862 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6863 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6864 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6865 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6866 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6867 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6868 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6869 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6870 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6871 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6872 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6873 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6874 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6875 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6876 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6877 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6878 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6879                PPC_NONE, PPC2_BOOKE206),
6880 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6881                PPC_NONE, PPC2_BOOKE206),
6882 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6883                PPC_NONE, PPC2_BOOKE206),
6884 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6885                PPC_NONE, PPC2_BOOKE206),
6886 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6887                PPC_NONE, PPC2_BOOKE206),
6888 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6889 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6890 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6891 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6892               PPC_BOOKE, PPC2_BOOKE206),
6893 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6894 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6895                PPC_BOOKE, PPC2_BOOKE206),
6896 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6897              PPC_440_SPEC),
6898 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6899 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6900 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6901 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6902 #if defined(TARGET_PPC64)
6903 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6904               PPC2_ISA300),
6905 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6906 #endif
6907 
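/*
 * GEN_INT_ARITH_* and the similar macros that follow are redefined here
 * so that the same spellings used earlier in this file to generate the
 * gen_* helpers now expand to opcode-table entries instead of code.
 */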
6908 #undef GEN_INT_ARITH_ADD
6909 #undef GEN_INT_ARITH_ADD_CONST
6910 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6911 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6912 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6913                                 add_ca, compute_ca, compute_ov)               \
6914 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6915 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6916 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6917 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6918 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6919 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6920 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6921 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6922 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6923 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
6924 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6925 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
6926 
6927 #undef GEN_INT_ARITH_DIVW
6928 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
6929 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6930 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6931 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6932 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6933 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6934 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6935 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6936 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6937 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6938 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6939 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6940 
6941 #if defined(TARGET_PPC64)
6942 #undef GEN_INT_ARITH_DIVD
6943 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
6944 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6945 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6946 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6947 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6948 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6949 
6950 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6951 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6952 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6953 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6954 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6955 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6956 
6957 #undef GEN_INT_ARITH_MUL_HELPER
6958 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
6959 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6960 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6961 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6962 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6963 #endif
6964 
6965 #undef GEN_INT_ARITH_SUBF
6966 #undef GEN_INT_ARITH_SUBF_CONST
6967 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
6968 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6969 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
6970                                 add_ca, compute_ca, compute_ov)               \
6971 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6972 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6973 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6974 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6975 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6976 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6977 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6978 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6979 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6980 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6981 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6982 
6983 #undef GEN_LOGICAL1
6984 #undef GEN_LOGICAL2
6985 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
6986 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6987 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
6988 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6989 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6990 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6991 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
6992 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
6993 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
6994 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
6995 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
6996 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
6997 #if defined(TARGET_PPC64)
6998 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
6999 #endif
7000 
7001 #if defined(TARGET_PPC64)
7002 #undef GEN_PPC64_R2
7003 #undef GEN_PPC64_R4
7004 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
7005 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7006 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7007              PPC_64B)
7008 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
7009 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
7010 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
7011              PPC_64B),                                                        \
7012 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
7013              PPC_64B),                                                        \
7014 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
7015              PPC_64B)
7016 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
7017 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
7018 GEN_PPC64_R4(rldic, 0x1E, 0x04),
7019 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
7020 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
7021 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
7022 #endif
7023 
7024 #undef GEN_LDX_E
7025 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
7026 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
7027 
7028 #if defined(TARGET_PPC64)
7029 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
7030 
7031 /* HV/P7 and later only */
7032 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
7033 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
7034 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
7035 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
7036 #endif
7037 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
7038 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
7039 
7040 /* External PID based load */
7041 #undef GEN_LDEPX
7042 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
7043 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7044               0x00000001, PPC_NONE, PPC2_BOOKE206),
7045 
7046 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
7047 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
7048 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
7049 #if defined(TARGET_PPC64)
7050 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
7051 #endif
7052 
7053 #undef GEN_STX_E
7054 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
7055 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
7056 
7057 #if defined(TARGET_PPC64)
7058 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
7059 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
7060 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
7061 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
7062 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
7063 #endif
7064 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
7065 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
7066 
7067 #undef GEN_STEPX
7068 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
7069 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7070               0x00000001, PPC_NONE, PPC2_BOOKE206),
7071 
7072 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
7073 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
7074 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
7075 #if defined(TARGET_PPC64)
7076 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
7077 #endif
7078 
7079 #undef GEN_CRLOGIC
7080 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
7081 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
7082 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
7083 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
7084 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
7085 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
7086 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
7087 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
7088 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
7089 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
7090 
7091 #undef GEN_MAC_HANDLER
7092 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
7093 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
7094 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
7095 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
7096 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
7097 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
7098 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
7099 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
7100 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
7101 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
7102 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
7103 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
7104 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
7105 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
7106 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
7107 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
7108 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
7109 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
7110 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
7111 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
7112 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
7113 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
7114 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
7115 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
7116 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
7117 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
7118 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
7119 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
7120 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
7121 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
7122 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
7123 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
7124 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
7125 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
7126 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
7127 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
7128 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
7129 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
7130 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
7131 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
7132 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
7133 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
7134 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
7135 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
7136 
7137 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
7138                PPC_NONE, PPC2_TM),
7139 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
7140                PPC_NONE, PPC2_TM),
7141 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
7142                PPC_NONE, PPC2_TM),
7143 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
7144                PPC_NONE, PPC2_TM),
7145 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
7146                PPC_NONE, PPC2_TM),
7147 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
7148                PPC_NONE, PPC2_TM),
7149 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
7150                PPC_NONE, PPC2_TM),
7151 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
7152                PPC_NONE, PPC2_TM),
7153 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
7154                PPC_NONE, PPC2_TM),
7155 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
7156                PPC_NONE, PPC2_TM),
7157 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
7158                PPC_NONE, PPC2_TM),
7159 
7160 #include "translate/fp-ops.c.inc"
7161 
7162 #include "translate/vmx-ops.c.inc"
7163 
7164 #include "translate/vsx-ops.c.inc"
7165 
7166 #include "translate/spe-ops.c.inc"
7167 };
7168 
7169 /*****************************************************************************/
7170 /* Opcode types */
7171 enum {
7172     PPC_DIRECT   = 0, /* Opcode routine        */
7173     PPC_INDIRECT = 1, /* Indirect opcode table */
7174 };
7175 
7176 #define PPC_OPCODE_MASK 0x3
7177 
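/*
 * Table entries are opc_handler_t pointers with the low bits used as a
 * tag: an entry with PPC_INDIRECT set does not point to a handler but
 * (once the tag is cleared by ind_table()) to a nested table of
 * PPC_CPU_INDIRECT_OPCODES_LEN entries.  This relies on opc_handler_t
 * structures being at least 4-byte aligned.
 */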
7178 static inline int is_indirect_opcode(void *handler)
7179 {
7180     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
7181 }
7182 
7183 static inline opc_handler_t **ind_table(void *handler)
7184 {
7185     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
7186 }
7187 
7188 /* Opcode table creation */
7190 static void fill_new_table(opc_handler_t **table, int len)
7191 {
7192     int i;
7193 
7194     for (i = 0; i < len; i++) {
7195         table[i] = &invalid_handler;
7196     }
7197 }
7198 
7199 static int create_new_table(opc_handler_t **table, unsigned char idx)
7200 {
7201     opc_handler_t **tmp;
7202 
7203     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
7204     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
7205     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
7206 
7207     return 0;
7208 }
7209 
7210 static int insert_in_table(opc_handler_t **table, unsigned char idx,
7211                             opc_handler_t *handler)
7212 {
7213     if (table[idx] != &invalid_handler) {
7214         return -1;
7215     }
7216     table[idx] = handler;
7217 
7218     return 0;
7219 }
7220 
7221 static int register_direct_insn(opc_handler_t **ppc_opcodes,
7222                                 unsigned char idx, opc_handler_t *handler)
7223 {
7224     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
7225         printf("*** ERROR: opcode %02x already assigned in main "
7226                "opcode table\n", idx);
7227         return -1;
7228     }
7229 
7230     return 0;
7231 }
7232 
7233 static int register_ind_in_table(opc_handler_t **table,
7234                                  unsigned char idx1, unsigned char idx2,
7235                                  opc_handler_t *handler)
7236 {
7237     if (table[idx1] == &invalid_handler) {
7238         if (create_new_table(table, idx1) < 0) {
7239             printf("*** ERROR: unable to create indirect table "
7240                    "idx=%02x\n", idx1);
7241             return -1;
7242         }
7243     } else {
7244         if (!is_indirect_opcode(table[idx1])) {
7245             printf("*** ERROR: idx %02x already assigned to a direct "
7246                    "opcode\n", idx1);
7247             return -1;
7248         }
7249     }
7250     if (handler != NULL &&
7251         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
7252         printf("*** ERROR: opcode %02x already assigned in "
7253                "opcode table %02x\n", idx2, idx1);
7254         return -1;
7255     }
7256 
7257     return 0;
7258 }
7259 
7260 static int register_ind_insn(opc_handler_t **ppc_opcodes,
7261                              unsigned char idx1, unsigned char idx2,
7262                              opc_handler_t *handler)
7263 {
7264     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
7265 }
7266 
7267 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
7268                                 unsigned char idx1, unsigned char idx2,
7269                                 unsigned char idx3, opc_handler_t *handler)
7270 {
7271     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7272         printf("*** ERROR: unable to join indirect table idx "
7273                "[%02x-%02x]\n", idx1, idx2);
7274         return -1;
7275     }
7276     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
7277                               handler) < 0) {
7278         printf("*** ERROR: unable to insert opcode "
7279                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7280         return -1;
7281     }
7282 
7283     return 0;
7284 }
7285 
7286 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
7287                                  unsigned char idx1, unsigned char idx2,
7288                                  unsigned char idx3, unsigned char idx4,
7289                                  opc_handler_t *handler)
7290 {
7291     opc_handler_t **table;
7292 
7293     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7294         printf("*** ERROR: unable to join indirect table idx "
7295                "[%02x-%02x]\n", idx1, idx2);
7296         return -1;
7297     }
7298     table = ind_table(ppc_opcodes[idx1]);
7299     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
7300         printf("*** ERROR: unable to join 2nd-level indirect table idx "
7301                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7302         return -1;
7303     }
7304     table = ind_table(table[idx2]);
7305     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
7306         printf("*** ERROR: unable to insert opcode "
7307                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
7308         return -1;
7309     }
7310     return 0;
7311 }
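
/*
 * Register one opcodes[] entry.  The 0xFF sentinel in opc2/opc3/opc4
 * indicates how deep the encoding goes: a bare major opcode, or one,
 * two or three levels of extended opcode below it.
 */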
7312 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7313 {
7314     if (insn->opc2 != 0xFF) {
7315         if (insn->opc3 != 0xFF) {
7316             if (insn->opc4 != 0xFF) {
7317                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7318                                           insn->opc3, insn->opc4,
7319                                           &insn->handler) < 0) {
7320                     return -1;
7321                 }
7322             } else {
7323                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7324                                          insn->opc3, &insn->handler) < 0) {
7325                     return -1;
7326                 }
7327             }
7328         } else {
7329             if (register_ind_insn(ppc_opcodes, insn->opc1,
7330                                   insn->opc2, &insn->handler) < 0) {
7331                 return -1;
7332             }
7333         }
7334     } else {
7335         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7336             return -1;
7337         }
7338     }
7339 
7340     return 0;
7341 }
7342 
7343 static int test_opcode_table(opc_handler_t **table, int len)
7344 {
7345     int i, count, tmp;
7346 
7347     for (i = 0, count = 0; i < len; i++) {
7348         /* Consistency fixup */
7349         if (table[i] == NULL) {
7350             table[i] = &invalid_handler;
7351         }
7352         if (table[i] != &invalid_handler) {
7353             if (is_indirect_opcode(table[i])) {
7354                 tmp = test_opcode_table(ind_table(table[i]),
7355                     PPC_CPU_INDIRECT_OPCODES_LEN);
7356                 if (tmp == 0) {
7357                     g_free(ind_table(table[i]));
7358                     table[i] = &invalid_handler;
7359                 } else {
7360                     count++;
7361                 }
7362             } else {
7363                 count++;
7364             }
7365         }
7366     }
7367 
7368     return count;
7369 }
7370 
7371 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7372 {
7373     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7374         printf("*** WARNING: no opcode defined!\n");
7375     }
7376 }
7377 
7378 /*****************************************************************************/
7379 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7380 {
7381     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7382     opcode_t *opc;
7383 
7384     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7385     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7386         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7387             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7388             if (register_insn(cpu->opcodes, opc) < 0) {
7389                 error_setg(errp, "ERROR initializing PowerPC instruction "
7390                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7391                            opc->opc3);
7392                 return;
7393             }
7394         }
7395     }
7396     fix_opcode_tables(cpu->opcodes);
7397     fflush(stdout);
7398     fflush(stderr);
7399 }
7400 
7401 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7402 {
7403     opc_handler_t **table, **table_2;
7404     int i, j, k;
7405 
7406     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7407         if (cpu->opcodes[i] == &invalid_handler) {
7408             continue;
7409         }
7410         if (is_indirect_opcode(cpu->opcodes[i])) {
7411             table = ind_table(cpu->opcodes[i]);
7412             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7413                 if (table[j] == &invalid_handler) {
7414                     continue;
7415                 }
7416                 if (is_indirect_opcode(table[j])) {
7417                     table_2 = ind_table(table[j]);
7418                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7419                         if (table_2[k] != &invalid_handler &&
7420                             is_indirect_opcode(table_2[k])) {
7421                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7422                                                      ~PPC_INDIRECT));
7423                         }
7424                     }
7425                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7426                                              ~PPC_INDIRECT));
7427                 }
7428             }
7429             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7430                 ~PPC_INDIRECT));
7431         }
7432     }
7433 }
7434 
7435 int ppc_fixup_cpu(PowerPCCPU *cpu)
7436 {
7437     CPUPPCState *env = &cpu->env;
7438 
7439     /*
7440      * TCG doesn't (yet) emulate some groups of instructions that are
7441      * implemented on some otherwise supported CPUs (e.g. VSX and
7442      * decimal floating point instructions on POWER7).  We remove
7443      * unsupported instruction groups from the cpu state's instruction
7444      * masks and hope the guest can cope.  For at least the pseries
7445      * machine, the unavailability of these instructions can be
7446      * advertised to the guest via the device tree.
7447      */
7448     if ((env->insns_flags & ~PPC_TCG_INSNS)
7449         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7450         warn_report("Disabling some instructions which are not "
7451                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7452                     env->insns_flags & ~PPC_TCG_INSNS,
7453                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7454     }
7455     env->insns_flags &= PPC_TCG_INSNS;
7456     env->insns_flags2 &= PPC_TCG_INSNS2;
7457     return 0;
7458 }
7459 
7460 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
7461 {
7462     opc_handler_t **table, *handler;
7463     uint32_t inval;
7464 
7465     ctx->opcode = insn;
7466 
7467     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7468               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7469               ctx->le_mode ? "little" : "big");
7470 
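    /*
     * Walk the opcode tables: opc1 indexes the top-level table and, as
     * long as the selected entry is tagged PPC_INDIRECT, we descend into
     * the nested table using opc2, opc3 and finally opc4.
     */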
7471     table = cpu->opcodes;
7472     handler = table[opc1(insn)];
7473     if (is_indirect_opcode(handler)) {
7474         table = ind_table(handler);
7475         handler = table[opc2(insn)];
7476         if (is_indirect_opcode(handler)) {
7477             table = ind_table(handler);
7478             handler = table[opc3(insn)];
7479             if (is_indirect_opcode(handler)) {
7480                 table = ind_table(handler);
7481                 handler = table[opc4(insn)];
7482             }
7483         }
7484     }
7485 
7486     /* Is the opcode *REALLY* valid? */
7487     if (unlikely(handler->handler == &gen_invalid)) {
7488         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7489                       "%02x - %02x - %02x - %02x (%08x) "
7490                       TARGET_FMT_lx "\n",
7491                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7492                       insn, ctx->cia);
7493         return false;
7494     }
7495 
7496     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7497                  && Rc(insn))) {
7498         inval = handler->inval2;
7499     } else {
7500         inval = handler->inval1;
7501     }
7502 
7503     if (unlikely((insn & inval) != 0)) {
7504         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7505                       "%02x - %02x - %02x - %02x (%08x) "
7506                       TARGET_FMT_lx "\n", insn & inval,
7507                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7508                       insn, ctx->cia);
7509         return false;
7510     }
7511 
7512     handler->handler(ctx);
7513     return true;
7514 }
7515 
7516 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
7517 {
7518     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7519     CPUPPCState *env = cs->env_ptr;
7520     uint32_t hflags = ctx->base.tb->flags;
7521 
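    /*
     * Everything the translator needs to know about the current CPU
     * state comes either from tb->flags (the hflags decoded below) or
     * from fields that are constant for a given CPU, so a translated
     * block remains valid whenever the same hflags are in effect.
     */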
7522     ctx->spr_cb = env->spr_cb;
7523     ctx->pr = (hflags >> HFLAGS_PR) & 1;
7524     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
7525     ctx->dr = (hflags >> HFLAGS_DR) & 1;
7526     ctx->hv = (hflags >> HFLAGS_HV) & 1;
7527     ctx->insns_flags = env->insns_flags;
7528     ctx->insns_flags2 = env->insns_flags2;
7529     ctx->access_type = -1;
7530     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
7531     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
7532     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
7533     ctx->flags = env->flags;
7534 #if defined(TARGET_PPC64)
7535     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
7536     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
7537 #endif
7538     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
7539         || env->mmu_model & POWERPC_MMU_64;
7540 
7541     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
7542     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
7543     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
7544     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
7545     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
7546     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
7547     ctx->hr = (hflags >> HFLAGS_HR) & 1;
7548     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
7549     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
7550     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
7551     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
7552     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
7553 
7554     ctx->singlestep_enabled = 0;
7555     if ((hflags >> HFLAGS_SE) & 1) {
7556         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
7557         ctx->base.max_insns = 1;
7558     }
7559     if ((hflags >> HFLAGS_BE) & 1) {
7560         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
7561     }
7562 }
7563 
7564 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
7565 {
7566 }
7567 
7568 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
7569 {
7570     tcg_gen_insn_start(dcbase->pc_next);
7571 }
7572 
7573 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
7574 {
7575     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
7576     return opc1(insn) == 1;
7577 }
7578 
7579 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
7580 {
7581     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7582     PowerPCCPU *cpu = POWERPC_CPU(cs);
7583     CPUPPCState *env = cs->env_ptr;
7584     target_ulong pc;
7585     uint32_t insn;
7586     bool ok;
7587 
7588     LOG_DISAS("----------------\n");
7589     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
7590               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
7591 
7592     ctx->cia = pc = ctx->base.pc_next;
7593     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
7594     ctx->base.pc_next = pc += 4;
7595 
7596     if (!is_prefix_insn(ctx, insn)) {
7597         ok = (decode_insn32(ctx, insn) ||
7598               decode_legacy(cpu, ctx, insn));
7599     } else if ((pc & 63) == 0) {
7600         /*
7601          * Power v3.1, section 1.9 Exceptions:
7602          * attempt to execute a prefixed instruction that crosses a
7603          * 64-byte address boundary (system alignment error).
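         * Note that pc has already been advanced past the prefix word,
         * so (pc & 63) == 0 means the prefix occupies the last word of a
         * 64-byte block and its suffix would start in the next block.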
7604          */
7605         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
7606         ok = true;
7607     } else {
7608         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
7609                                              need_byteswap(ctx));
7610         ctx->base.pc_next = pc += 4;
7611         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
7612     }
7613     if (!ok) {
7614         gen_invalid(ctx);
7615     }
7616 
7617     /* End the TB when crossing a page boundary. */
7618     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
7619         ctx->base.is_jmp = DISAS_TOO_MANY;
7620     }
7621 
7622     translator_loop_temp_check(&ctx->base);
7623 }
7624 
7625 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7626 {
7627     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7628     DisasJumpType is_jmp = ctx->base.is_jmp;
7629     target_ulong nip = ctx->base.pc_next;
7630 
7631     if (is_jmp == DISAS_NORETURN) {
7632         /* We have already exited the TB. */
7633         return;
7634     }
7635 
7636     /* Honor single stepping. */
7637     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7638         && (nip <= 0x100 || nip > 0xf00)) {
7639         switch (is_jmp) {
7640         case DISAS_TOO_MANY:
7641         case DISAS_EXIT_UPDATE:
7642         case DISAS_CHAIN_UPDATE:
7643             gen_update_nip(ctx, nip);
7644             break;
7645         case DISAS_EXIT:
7646         case DISAS_CHAIN:
7647             break;
7648         default:
7649             g_assert_not_reached();
7650         }
7651 
7652         gen_debug_exception(ctx);
7653         return;
7654     }
7655 
7656     switch (is_jmp) {
7657     case DISAS_TOO_MANY:
7658         if (use_goto_tb(ctx, nip)) {
7659             pmu_count_insns(ctx);
7660             tcg_gen_goto_tb(0);
7661             gen_update_nip(ctx, nip);
7662             tcg_gen_exit_tb(ctx->base.tb, 0);
7663             break;
7664         }
7665         /* fall through */
7666     case DISAS_CHAIN_UPDATE:
7667         gen_update_nip(ctx, nip);
7668         /* fall through */
7669     case DISAS_CHAIN:
7670         /*
7671          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7672          * CF_NO_GOTO_PTR is set. Count insns now.
7673          */
7674         if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
7675             pmu_count_insns(ctx);
7676         }
7677 
7678         tcg_gen_lookup_and_goto_ptr();
7679         break;
7680 
7681     case DISAS_EXIT_UPDATE:
7682         gen_update_nip(ctx, nip);
7683         /* fall through */
7684     case DISAS_EXIT:
7685         pmu_count_insns(ctx);
7686         tcg_gen_exit_tb(NULL, 0);
7687         break;
7688 
7689     default:
7690         g_assert_not_reached();
7691     }
7692 }
7693 
7694 static void ppc_tr_disas_log(const DisasContextBase *dcbase,
7695                              CPUState *cs, FILE *logfile)
7696 {
7697     fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
7698     target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
7699 }
7700 
7701 static const TranslatorOps ppc_tr_ops = {
7702     .init_disas_context = ppc_tr_init_disas_context,
7703     .tb_start           = ppc_tr_tb_start,
7704     .insn_start         = ppc_tr_insn_start,
7705     .translate_insn     = ppc_tr_translate_insn,
7706     .tb_stop            = ppc_tr_tb_stop,
7707     .disas_log          = ppc_tr_disas_log,
7708 };
7709 
7710 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns,
7711                            target_ulong pc, void *host_pc)
7712 {
7713     DisasContext ctx;
7714 
7715     translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
7716 }
7717