xref: /openbmc/qemu/target/ppc/translate.c (revision 3ad3326bd658dd3bb210d5f2997e6b9a913ccb95)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "exec/target_page.h"
25 #include "tcg/tcg-op.h"
26 #include "tcg/tcg-op-gvec.h"
27 #include "qemu/host-utils.h"
28 
29 #include "exec/helper-proto.h"
30 #include "exec/helper-gen.h"
31 
32 #include "exec/translator.h"
33 #include "exec/translation-block.h"
34 #include "exec/log.h"
35 #include "qemu/atomic128.h"
36 #include "spr_common.h"
37 #include "power8-pmu.h"
38 
39 #include "qemu/qemu-print.h"
40 #include "qapi/error.h"
41 
42 #define HELPER_H "helper.h"
43 #include "exec/helper-info.c.inc"
44 #undef  HELPER_H
45 
46 #define CPU_SINGLE_STEP 0x1
47 #define CPU_BRANCH_STEP 0x2
48 
49 /* Include definitions for instructions classes and implementations flags */
50 /* #define PPC_DEBUG_DISAS */
51 
52 #ifdef PPC_DEBUG_DISAS
53 #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
54 #else
55 #  define LOG_DISAS(...) do { } while (0)
56 #endif
57 /*****************************************************************************/
58 /* Code translation helpers                                                  */
59 
60 /* global register indexes */
61 static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
62                           + 10 * 4 + 22 * 5 /* SPE GPRh */
63                           + 8 * 5           /* CRF */];
64 static TCGv cpu_gpr[32];
65 static TCGv cpu_gprh[32];
66 static TCGv_i32 cpu_crf[8];
67 static TCGv cpu_nip;
68 static TCGv cpu_msr;
69 static TCGv cpu_ctr;
70 static TCGv cpu_lr;
71 #if defined(TARGET_PPC64)
72 static TCGv cpu_cfar;
73 #endif
74 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
75 static TCGv cpu_reserve;
76 static TCGv cpu_reserve_length;
77 static TCGv cpu_reserve_val;
78 #if defined(TARGET_PPC64)
79 static TCGv cpu_reserve_val2;
80 #endif
81 static TCGv cpu_fpscr;
82 static TCGv_i32 cpu_access_type;
83 
84 void ppc_translate_init(void)
85 {
86     int i;
87     char *p;
88     size_t cpu_reg_names_size;
89 
90     p = cpu_reg_names;
91     cpu_reg_names_size = sizeof(cpu_reg_names);
92 
93     for (i = 0; i < 8; i++) {
94         snprintf(p, cpu_reg_names_size, "crf%d", i);
95         cpu_crf[i] = tcg_global_mem_new_i32(tcg_env,
96                                             offsetof(CPUPPCState, crf[i]), p);
97         p += 5;
98         cpu_reg_names_size -= 5;
99     }
100 
101     for (i = 0; i < 32; i++) {
102         snprintf(p, cpu_reg_names_size, "r%d", i);
103         cpu_gpr[i] = tcg_global_mem_new(tcg_env,
104                                         offsetof(CPUPPCState, gpr[i]), p);
105         p += (i < 10) ? 3 : 4;
106         cpu_reg_names_size -= (i < 10) ? 3 : 4;
107         snprintf(p, cpu_reg_names_size, "r%dH", i);
108         cpu_gprh[i] = tcg_global_mem_new(tcg_env,
109                                          offsetof(CPUPPCState, gprh[i]), p);
110         p += (i < 10) ? 4 : 5;
111         cpu_reg_names_size -= (i < 10) ? 4 : 5;
112     }
113 
114     cpu_nip = tcg_global_mem_new(tcg_env,
115                                  offsetof(CPUPPCState, nip), "nip");
116 
117     cpu_msr = tcg_global_mem_new(tcg_env,
118                                  offsetof(CPUPPCState, msr), "msr");
119 
120     cpu_ctr = tcg_global_mem_new(tcg_env,
121                                  offsetof(CPUPPCState, ctr), "ctr");
122 
123     cpu_lr = tcg_global_mem_new(tcg_env,
124                                 offsetof(CPUPPCState, lr), "lr");
125 
126 #if defined(TARGET_PPC64)
127     cpu_cfar = tcg_global_mem_new(tcg_env,
128                                   offsetof(CPUPPCState, cfar), "cfar");
129 #endif
130 
131     cpu_xer = tcg_global_mem_new(tcg_env,
132                                  offsetof(CPUPPCState, xer), "xer");
133     cpu_so = tcg_global_mem_new(tcg_env,
134                                 offsetof(CPUPPCState, so), "SO");
135     cpu_ov = tcg_global_mem_new(tcg_env,
136                                 offsetof(CPUPPCState, ov), "OV");
137     cpu_ca = tcg_global_mem_new(tcg_env,
138                                 offsetof(CPUPPCState, ca), "CA");
139     cpu_ov32 = tcg_global_mem_new(tcg_env,
140                                   offsetof(CPUPPCState, ov32), "OV32");
141     cpu_ca32 = tcg_global_mem_new(tcg_env,
142                                   offsetof(CPUPPCState, ca32), "CA32");
143 
144     cpu_reserve = tcg_global_mem_new(tcg_env,
145                                      offsetof(CPUPPCState, reserve_addr),
146                                      "reserve_addr");
147     cpu_reserve_length = tcg_global_mem_new(tcg_env,
148                                             offsetof(CPUPPCState,
149                                                      reserve_length),
150                                             "reserve_length");
151     cpu_reserve_val = tcg_global_mem_new(tcg_env,
152                                          offsetof(CPUPPCState, reserve_val),
153                                          "reserve_val");
154 #if defined(TARGET_PPC64)
155     cpu_reserve_val2 = tcg_global_mem_new(tcg_env,
156                                           offsetof(CPUPPCState, reserve_val2),
157                                           "reserve_val2");
158 #endif
159 
160     cpu_fpscr = tcg_global_mem_new(tcg_env,
161                                    offsetof(CPUPPCState, fpscr), "fpscr");
162 
163     cpu_access_type = tcg_global_mem_new_i32(tcg_env,
164                                              offsetof(CPUPPCState, access_type),
165                                              "access_type");
166 }
167 
168 /* internal defines */
169 struct DisasContext {
170     DisasContextBase base;
171     target_ulong cia;  /* current instruction address */
172     uint32_t opcode;
173     /* Routine used to access memory */
174     bool pr, hv, dr, le_mode;
175     bool lazy_tlb_flush;
176     bool need_access_type;
177     int mem_idx;
178     int access_type;
179     /* Translation flags */
180     MemOp default_tcg_memop_mask;
181 #if defined(TARGET_PPC64)
182     powerpc_excp_t excp_model;
183     bool sf_mode;
184     bool has_cfar;
185     bool has_bhrb;
186 #endif
187     bool fpu_enabled;
188     bool altivec_enabled;
189     bool vsx_enabled;
190     bool spe_enabled;
191     bool tm_enabled;
192     bool gtse;
193     bool hr;
194     bool mmcr0_pmcc0;
195     bool mmcr0_pmcc1;
196     bool mmcr0_pmcjce;
197     bool pmc_other;
198     bool pmu_insn_cnt;
199     bool bhrb_enable;
200     ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
201     int singlestep_enabled;
202     uint32_t flags;
203     uint64_t insns_flags;
204     uint64_t insns_flags2;
205 };
206 
207 #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
208 #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
209 #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
210 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
211 
212 static inline bool is_ppe(const DisasContext *ctx)
213 {
214     return !!(ctx->flags & POWERPC_FLAG_PPE42);
215 }
216 
217 /* Return true iff byteswap is needed in a scalar memop */
218 static inline bool need_byteswap(const DisasContext *ctx)
219 {
220 #if TARGET_BIG_ENDIAN
221      return ctx->le_mode;
222 #else
223      return !ctx->le_mode;
224 #endif
225 }
226 
227 /* True when active word size < size of target_long.  */
228 #ifdef TARGET_PPC64
229 # define NARROW_MODE(C)  (!(C)->sf_mode)
230 #else
231 # define NARROW_MODE(C)  0
232 #endif
233 
234 struct opc_handler_t {
235     /* invalid bits for instruction 1 (Rc(opcode) == 0) */
236     uint32_t inval1;
237     /* invalid bits for instruction 2 (Rc(opcode) == 1) */
238     uint32_t inval2;
239     /* instruction type */
240     uint64_t type;
241     /* extended instruction type */
242     uint64_t type2;
243     /* handler */
244     void (*handler)(DisasContext *ctx);
245 };
246 
247 static inline bool gen_serialize(DisasContext *ctx)
248 {
249     if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
250         /* Restart with exclusive lock.  */
251         gen_helper_exit_atomic(tcg_env);
252         ctx->base.is_jmp = DISAS_NORETURN;
253         return false;
254     }
255     return true;
256 }
257 
258 #if !defined(CONFIG_USER_ONLY)
259 #if defined(TARGET_PPC64)
260 static inline bool gen_serialize_core(DisasContext *ctx)
261 {
262     if (ctx->flags & POWERPC_FLAG_SMT) {
263         return gen_serialize(ctx);
264     }
265     return true;
266 }
267 #endif
268 
269 static inline bool gen_serialize_core_lpar(DisasContext *ctx)
270 {
271 #if defined(TARGET_PPC64)
272     if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
273         return gen_serialize(ctx);
274     }
275 #endif
276     return true;
277 }
278 #endif
279 
280 /* SPR load/store helpers */
281 static inline void gen_load_spr(TCGv t, int reg)
282 {
283     tcg_gen_ld_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
284 }
285 
286 static inline void gen_store_spr(int reg, TCGv t)
287 {
288     tcg_gen_st_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
289 }
290 
291 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
292 {
293     if (ctx->need_access_type && ctx->access_type != access_type) {
294         tcg_gen_movi_i32(cpu_access_type, access_type);
295         ctx->access_type = access_type;
296     }
297 }
298 
299 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
300 {
301     if (NARROW_MODE(ctx)) {
302         nip = (uint32_t)nip;
303     }
304     tcg_gen_movi_tl(cpu_nip, nip);
305 }
306 
307 static void gen_exception_err_nip(DisasContext *ctx, uint32_t excp,
308                                   uint32_t error, target_ulong nip)
309 {
310     TCGv_i32 t0, t1;
311 
312     gen_update_nip(ctx, nip);
313     t0 = tcg_constant_i32(excp);
314     t1 = tcg_constant_i32(error);
315     gen_helper_raise_exception_err(tcg_env, t0, t1);
316     ctx->base.is_jmp = DISAS_NORETURN;
317 }
318 
319 static inline void gen_exception_err(DisasContext *ctx, uint32_t excp,
320                                      uint32_t error)
321 {
322     /*
323      * These are all synchronous exceptions, we set the PC back to the
324      * faulting instruction
325      */
326     gen_exception_err_nip(ctx, excp, error, ctx->cia);
327 }
328 
329 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
330                               target_ulong nip)
331 {
332     TCGv_i32 t0;
333 
334     gen_update_nip(ctx, nip);
335     t0 = tcg_constant_i32(excp);
336     gen_helper_raise_exception(tcg_env, t0);
337     ctx->base.is_jmp = DISAS_NORETURN;
338 }
339 
340 static inline void gen_exception(DisasContext *ctx, uint32_t excp)
341 {
342     /*
343      * These are all synchronous exceptions, we set the PC back to the
344      * faulting instruction
345      */
346     gen_exception_nip(ctx, excp, ctx->cia);
347 }
348 
349 #if !defined(CONFIG_USER_ONLY)
350 static void gen_ppc_maybe_interrupt(DisasContext *ctx)
351 {
352     translator_io_start(&ctx->base);
353     gen_helper_ppc_maybe_interrupt(tcg_env);
354 }
355 #endif
356 
357 /*
358  * Tells the caller what is the appropriate exception to generate and prepares
359  * SPR registers for this exception.
360  *
361  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
362  * POWERPC_EXCP_DEBUG (on BookE).
363  */
364 static void gen_debug_exception(DisasContext *ctx, bool rfi_type)
365 {
366 #if !defined(CONFIG_USER_ONLY)
367     if (ctx->flags & POWERPC_FLAG_DE) {
368         target_ulong dbsr = 0;
369         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
370             dbsr = DBCR0_ICMP;
371         } else {
372             /* Must have been branch */
373             dbsr = DBCR0_BRT;
374         }
375         TCGv t0 = tcg_temp_new();
376         gen_load_spr(t0, SPR_BOOKE_DBSR);
377         tcg_gen_ori_tl(t0, t0, dbsr);
378         gen_store_spr(SPR_BOOKE_DBSR, t0);
379         gen_helper_raise_exception(tcg_env,
380                                    tcg_constant_i32(POWERPC_EXCP_DEBUG));
381         ctx->base.is_jmp = DISAS_NORETURN;
382     } else {
383         if (!rfi_type) { /* BookS does not single step rfi type instructions */
384             TCGv t0 = tcg_temp_new();
385             tcg_gen_movi_tl(t0, ctx->cia);
386             gen_helper_book3s_trace(tcg_env, t0);
387             ctx->base.is_jmp = DISAS_NORETURN;
388         }
389     }
390 #endif
391 }
392 
393 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
394 {
395     /* Will be converted to program check if needed */
396     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
397 }
398 
399 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
400 {
401     gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
402 }
403 
404 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
405 {
406     /* Will be converted to program check if needed */
407     gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
408 }
409 
410 /*****************************************************************************/
411 /* SPR READ/WRITE CALLBACKS */
412 
413 void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
414 {
415 #if 0
416     sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
417     printf("ERROR: try to access SPR %d !\n", sprn);
418 #endif
419 }
420 
421 /* #define PPC_DUMP_SPR_ACCESSES */
422 
423 /*
424  * Generic callbacks:
425  * do nothing but store/retrieve spr value
426  */
427 static void spr_load_dump_spr(int sprn)
428 {
429 #ifdef PPC_DUMP_SPR_ACCESSES
430     TCGv_i32 t0 = tcg_constant_i32(sprn);
431     gen_helper_load_dump_spr(tcg_env, t0);
432 #endif
433 }
434 
435 void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
436 {
437     gen_load_spr(cpu_gpr[gprn], sprn);
438     spr_load_dump_spr(sprn);
439 }
440 
441 static void spr_store_dump_spr(int sprn)
442 {
443 #ifdef PPC_DUMP_SPR_ACCESSES
444     TCGv_i32 t0 = tcg_constant_i32(sprn);
445     gen_helper_store_dump_spr(tcg_env, t0);
446 #endif
447 }
448 
449 void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
450 {
451     gen_store_spr(sprn, cpu_gpr[gprn]);
452     spr_store_dump_spr(sprn);
453 }
454 
455 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
456 {
457 #ifdef TARGET_PPC64
458     TCGv t0 = tcg_temp_new();
459     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
460     gen_store_spr(sprn, t0);
461     spr_store_dump_spr(sprn);
462 #else
463     spr_write_generic(ctx, sprn, gprn);
464 #endif
465 }
466 
467 void spr_core_write_generic(DisasContext *ctx, int sprn, int gprn)
468 {
469     if (!(ctx->flags & POWERPC_FLAG_SMT)) {
470         spr_write_generic(ctx, sprn, gprn);
471         return;
472     }
473 
474     if (!gen_serialize(ctx)) {
475         return;
476     }
477 
478     gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn),
479                                       cpu_gpr[gprn]);
480     spr_store_dump_spr(sprn);
481 }
482 
483 void spr_core_write_generic32(DisasContext *ctx, int sprn, int gprn)
484 {
485     TCGv t0;
486 
487     if (!(ctx->flags & POWERPC_FLAG_SMT)) {
488         spr_write_generic32(ctx, sprn, gprn);
489         return;
490     }
491 
492     if (!gen_serialize(ctx)) {
493         return;
494     }
495 
496     t0 = tcg_temp_new();
497     tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
498     gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn), t0);
499     spr_store_dump_spr(sprn);
500 }
501 
502 void spr_core_lpar_write_generic(DisasContext *ctx, int sprn, int gprn)
503 {
504     if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
505         spr_core_write_generic(ctx, sprn, gprn);
506     } else {
507         spr_write_generic(ctx, sprn, gprn);
508     }
509 }
510 
511 static void spr_write_CTRL_ST(DisasContext *ctx, int sprn, int gprn)
512 {
513     /* This does not implement >1 thread */
514     TCGv t0 = tcg_temp_new();
515     TCGv t1 = tcg_temp_new();
516     tcg_gen_extract_tl(t0, cpu_gpr[gprn], 0, 1); /* Extract RUN field */
517     tcg_gen_shli_tl(t1, t0, 8); /* Duplicate the bit in TS */
518     tcg_gen_or_tl(t1, t1, t0);
519     gen_store_spr(sprn, t1);
520 }
521 
522 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
523 {
524     if (!(ctx->flags & POWERPC_FLAG_SMT_1LPAR)) {
525         /* CTRL behaves as 1-thread in LPAR-per-thread mode */
526         spr_write_CTRL_ST(ctx, sprn, gprn);
527         goto out;
528     }
529 
530     if (!gen_serialize(ctx)) {
531         return;
532     }
533 
534     gen_helper_spr_write_CTRL(tcg_env, tcg_constant_i32(sprn),
535                               cpu_gpr[gprn]);
536 out:
537     spr_store_dump_spr(sprn);
538 
539     /*
540      * SPR_CTRL writes must force a new translation block,
541      * allowing the PMU to calculate the run latch events with
542      * more accuracy.
543      */
544     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
545 }
546 
547 #if !defined(CONFIG_USER_ONLY)
548 void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
549 {
550     TCGv t0 = tcg_temp_new();
551     TCGv t1 = tcg_temp_new();
552     gen_load_spr(t0, sprn);
553     tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
554     tcg_gen_and_tl(t0, t0, t1);
555     gen_store_spr(sprn, t0);
556 }
557 
558 void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
559 {
560 }
561 
562 #endif
563 
564 static void gen_get_xer(DisasContext *ctx, TCGv dst)
565 {
566     TCGv t0 = tcg_temp_new();
567     TCGv t1 = tcg_temp_new();
568     TCGv t2 = tcg_temp_new();
569     tcg_gen_mov_tl(dst, cpu_xer);
570     tcg_gen_shli_tl(t0, cpu_so, XER_SO);
571     tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
572     tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
573     tcg_gen_or_tl(t0, t0, t1);
574     tcg_gen_or_tl(dst, dst, t2);
575     tcg_gen_or_tl(dst, dst, t0);
576     if (is_isa300(ctx)) {
577         tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
578         tcg_gen_or_tl(dst, dst, t0);
579         tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
580         tcg_gen_or_tl(dst, dst, t0);
581     }
582 }
583 
584 /* SPR common to all PowerPC */
585 /* XER */
586 void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
587 {
588     TCGv dst = cpu_gpr[gprn];
589     gen_get_xer(ctx, dst);
590 }
591 
592 static void gen_set_xer(DisasContext *ctx, TCGv src)
593 {
594     /* Write all flags, while reading back check for isa300 */
595     tcg_gen_andi_tl(cpu_xer, src,
596                     ~((1u << XER_SO) |
597                       (1u << XER_OV) | (1u << XER_OV32) |
598                       (1u << XER_CA) | (1u << XER_CA32)));
599     tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
600     tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
601     tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
602     tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
603     tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
604 }
605 
606 void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
607 {
608     TCGv src = cpu_gpr[gprn];
609     gen_set_xer(ctx, src);
610 }
611 
612 /* LR */
613 void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
614 {
615     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
616 }
617 
618 void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
619 {
620     tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
621 }
622 
623 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
624 /* Debug facilities */
625 /* CFAR */
626 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
627 {
628     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
629 }
630 
631 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
632 {
633     tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
634 }
635 
636 /* Breakpoint */
637 void spr_write_ciabr(DisasContext *ctx, int sprn, int gprn)
638 {
639     translator_io_start(&ctx->base);
640     gen_helper_store_ciabr(tcg_env, cpu_gpr[gprn]);
641 }
642 
643 /* Watchpoint */
644 void spr_write_dawr0(DisasContext *ctx, int sprn, int gprn)
645 {
646     translator_io_start(&ctx->base);
647     gen_helper_store_dawr0(tcg_env, cpu_gpr[gprn]);
648 }
649 
650 void spr_write_dawrx0(DisasContext *ctx, int sprn, int gprn)
651 {
652     translator_io_start(&ctx->base);
653     gen_helper_store_dawrx0(tcg_env, cpu_gpr[gprn]);
654 }
655 
656 void spr_write_dawr1(DisasContext *ctx, int sprn, int gprn)
657 {
658     translator_io_start(&ctx->base);
659     gen_helper_store_dawr1(tcg_env, cpu_gpr[gprn]);
660 }
661 
662 void spr_write_dawrx1(DisasContext *ctx, int sprn, int gprn)
663 {
664     translator_io_start(&ctx->base);
665     gen_helper_store_dawrx1(tcg_env, cpu_gpr[gprn]);
666 }
667 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
668 
669 /* CTR */
670 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
671 {
672     tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
673 }
674 
675 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
676 {
677     tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
678 }
679 
680 /* User read access to SPR */
681 /* USPRx */
682 /* UMMCRx */
683 /* UPMCx */
684 /* USIA */
685 /* UDECR */
686 void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
687 {
688     gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
689 }
690 
691 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
692 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
693 {
694     gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
695 }
696 #endif
697 
698 /* SPR common to all non-embedded PowerPC */
699 /* DECR */
700 #if !defined(CONFIG_USER_ONLY)
701 void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
702 {
703     translator_io_start(&ctx->base);
704     gen_helper_load_decr(cpu_gpr[gprn], tcg_env);
705 }
706 
707 void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
708 {
709     translator_io_start(&ctx->base);
710     gen_helper_store_decr(tcg_env, cpu_gpr[gprn]);
711 }
712 #endif
713 
714 /* SPR common to all non-embedded PowerPC, except 601 */
715 /* Time base */
716 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
717 {
718     translator_io_start(&ctx->base);
719     gen_helper_load_tbl(cpu_gpr[gprn], tcg_env);
720 }
721 
722 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
723 {
724     translator_io_start(&ctx->base);
725     gen_helper_load_tbu(cpu_gpr[gprn], tcg_env);
726 }
727 
728 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
729 {
730     gen_helper_load_atbl(cpu_gpr[gprn], tcg_env);
731 }
732 
733 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
734 {
735     gen_helper_load_atbu(cpu_gpr[gprn], tcg_env);
736 }
737 
738 #if !defined(CONFIG_USER_ONLY)
739 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
740 {
741     if (!gen_serialize_core_lpar(ctx)) {
742         return;
743     }
744 
745     translator_io_start(&ctx->base);
746     gen_helper_store_tbl(tcg_env, cpu_gpr[gprn]);
747 }
748 
749 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
750 {
751     if (!gen_serialize_core_lpar(ctx)) {
752         return;
753     }
754 
755     translator_io_start(&ctx->base);
756     gen_helper_store_tbu(tcg_env, cpu_gpr[gprn]);
757 }
758 
759 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
760 {
761     gen_helper_store_atbl(tcg_env, cpu_gpr[gprn]);
762 }
763 
764 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
765 {
766     gen_helper_store_atbu(tcg_env, cpu_gpr[gprn]);
767 }
768 
769 #if defined(TARGET_PPC64)
770 void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
771 {
772     translator_io_start(&ctx->base);
773     gen_helper_load_purr(cpu_gpr[gprn], tcg_env);
774 }
775 
776 void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
777 {
778     if (!gen_serialize_core_lpar(ctx)) {
779         return;
780     }
781     translator_io_start(&ctx->base);
782     gen_helper_store_purr(tcg_env, cpu_gpr[gprn]);
783 }
784 
785 /* HDECR */
786 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
787 {
788     translator_io_start(&ctx->base);
789     gen_helper_load_hdecr(cpu_gpr[gprn], tcg_env);
790 }
791 
792 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
793 {
794     if (!gen_serialize_core_lpar(ctx)) {
795         return;
796     }
797     translator_io_start(&ctx->base);
798     gen_helper_store_hdecr(tcg_env, cpu_gpr[gprn]);
799 }
800 
801 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
802 {
803     translator_io_start(&ctx->base);
804     gen_helper_load_vtb(cpu_gpr[gprn], tcg_env);
805 }
806 
807 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
808 {
809     if (!gen_serialize_core_lpar(ctx)) {
810         return;
811     }
812     translator_io_start(&ctx->base);
813     gen_helper_store_vtb(tcg_env, cpu_gpr[gprn]);
814 }
815 
816 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
817 {
818     if (!gen_serialize_core_lpar(ctx)) {
819         return;
820     }
821     translator_io_start(&ctx->base);
822     gen_helper_store_tbu40(tcg_env, cpu_gpr[gprn]);
823 }
824 
825 #endif
826 #endif
827 
828 #if !defined(CONFIG_USER_ONLY)
829 /* IBAT0U...IBAT0U */
830 /* IBAT0L...IBAT7L */
831 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
832 {
833     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
834                   offsetof(CPUPPCState,
835                            IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
836 }
837 
838 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
839 {
840     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
841                   offsetof(CPUPPCState,
842                            IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
843 }
844 
845 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
846 {
847     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0U) / 2);
848     gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
849 }
850 
851 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
852 {
853     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4U) / 2) + 4);
854     gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
855 }
856 
857 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
858 {
859     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0L) / 2);
860     gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
861 }
862 
863 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
864 {
865     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4L) / 2) + 4);
866     gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
867 }
868 
869 /* DBAT0U...DBAT7U */
870 /* DBAT0L...DBAT7L */
871 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
872 {
873     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
874                   offsetof(CPUPPCState,
875                            DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
876 }
877 
878 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
879 {
880     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
881                   offsetof(CPUPPCState,
882                            DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
883 }
884 
885 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
886 {
887     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0U) / 2);
888     gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
889 }
890 
891 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
892 {
893     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4U) / 2) + 4);
894     gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
895 }
896 
897 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
898 {
899     TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0L) / 2);
900     gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
901 }
902 
903 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
904 {
905     TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4L) / 2) + 4);
906     gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
907 }
908 
909 /* SDR1 */
910 void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
911 {
912     gen_helper_store_sdr1(tcg_env, cpu_gpr[gprn]);
913 }
914 
915 #if defined(TARGET_PPC64)
916 /* 64 bits PowerPC specific SPRs */
917 /* PIDR */
918 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
919 {
920     gen_helper_store_pidr(tcg_env, cpu_gpr[gprn]);
921 }
922 
923 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
924 {
925     gen_helper_store_lpidr(tcg_env, cpu_gpr[gprn]);
926 }
927 
928 void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
929 {
930     tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env, offsetof(CPUPPCState, excp_prefix));
931 }
932 
933 void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
934 {
935     TCGv t0 = tcg_temp_new();
936     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
937     tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
938 }
939 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
940 {
941     if (!gen_serialize_core(ctx)) {
942         return;
943     }
944 
945     gen_helper_store_ptcr(tcg_env, cpu_gpr[gprn]);
946 }
947 
948 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
949 {
950     gen_helper_store_pcr(tcg_env, cpu_gpr[gprn]);
951 }
952 
953 /* DPDES */
954 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
955 {
956     if (!gen_serialize_core_lpar(ctx)) {
957         return;
958     }
959 
960     gen_helper_load_dpdes(cpu_gpr[gprn], tcg_env);
961 }
962 
963 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
964 {
965     if (!gen_serialize_core_lpar(ctx)) {
966         return;
967     }
968 
969     gen_helper_store_dpdes(tcg_env, cpu_gpr[gprn]);
970 }
971 #endif
972 #endif
973 
974 /* PowerPC 40x specific registers */
975 #if !defined(CONFIG_USER_ONLY)
976 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
977 {
978     translator_io_start(&ctx->base);
979     gen_helper_load_40x_pit(cpu_gpr[gprn], tcg_env);
980 }
981 
982 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
983 {
984     translator_io_start(&ctx->base);
985     gen_helper_store_40x_pit(tcg_env, cpu_gpr[gprn]);
986 }
987 
988 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
989 {
990     translator_io_start(&ctx->base);
991     gen_store_spr(sprn, cpu_gpr[gprn]);
992     gen_helper_store_40x_dbcr0(tcg_env, cpu_gpr[gprn]);
993     /* We must stop translation as we may have rebooted */
994     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
995 }
996 
997 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
998 {
999     translator_io_start(&ctx->base);
1000     gen_helper_store_40x_sler(tcg_env, cpu_gpr[gprn]);
1001 }
1002 
1003 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
1004 {
1005     translator_io_start(&ctx->base);
1006     gen_helper_store_40x_tcr(tcg_env, cpu_gpr[gprn]);
1007 }
1008 
1009 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
1010 {
1011     translator_io_start(&ctx->base);
1012     gen_helper_store_40x_tsr(tcg_env, cpu_gpr[gprn]);
1013 }
1014 
1015 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
1016 {
1017     TCGv t0 = tcg_temp_new();
1018     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
1019     gen_helper_store_40x_pid(tcg_env, t0);
1020 }
1021 
1022 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
1023 {
1024     translator_io_start(&ctx->base);
1025     gen_helper_store_booke_tcr(tcg_env, cpu_gpr[gprn]);
1026 }
1027 
1028 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
1029 {
1030     translator_io_start(&ctx->base);
1031     gen_helper_store_booke_tsr(tcg_env, cpu_gpr[gprn]);
1032 }
1033 #endif
1034 
1035 /* PIR */
1036 #if !defined(CONFIG_USER_ONLY)
1037 void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
1038 {
1039     TCGv t0 = tcg_temp_new();
1040     tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
1041     gen_store_spr(SPR_PIR, t0);
1042 }
1043 #endif
1044 
1045 /* SPE specific registers */
1046 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
1047 {
1048     TCGv_i32 t0 = tcg_temp_new_i32();
1049     tcg_gen_ld_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
1050     tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
1051 }
1052 
1053 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
1054 {
1055     TCGv_i32 t0 = tcg_temp_new_i32();
1056     tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
1057     tcg_gen_st_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
1058 }
1059 
1060 #if !defined(CONFIG_USER_ONLY)
1061 /* Callback used to write the exception vector base */
1062 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
1063 {
1064     TCGv t0 = tcg_temp_new();
1065     tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivpr_mask));
1066     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1067     tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
1068     gen_store_spr(sprn, t0);
1069 }
1070 
1071 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
1072 {
1073     int sprn_offs;
1074 
1075     if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
1076         sprn_offs = sprn - SPR_BOOKE_IVOR0;
1077     } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
1078         sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
1079     } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
1080         sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
1081     } else {
1082         qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
1083                       " vector 0x%03x\n", sprn);
1084         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1085         return;
1086     }
1087 
1088     TCGv t0 = tcg_temp_new();
1089     tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivor_mask));
1090     tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1091     tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
1092     gen_store_spr(sprn, t0);
1093 }
1094 #endif
1095 
1096 #ifdef TARGET_PPC64
1097 #ifndef CONFIG_USER_ONLY
1098 void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
1099 {
1100     TCGv t0 = tcg_temp_new();
1101     TCGv t1 = tcg_temp_new();
1102     TCGv t2 = tcg_temp_new();
1103 
1104     /*
1105      * Note, the HV=1 PR=0 case is handled earlier by simply using
1106      * spr_write_generic for HV mode in the SPR table
1107      */
1108 
1109     /* Build insertion mask into t1 based on context */
1110     if (ctx->pr) {
1111         gen_load_spr(t1, SPR_UAMOR);
1112     } else {
1113         gen_load_spr(t1, SPR_AMOR);
1114     }
1115 
1116     /* Mask new bits into t2 */
1117     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1118 
1119     /* Load AMR and clear new bits in t0 */
1120     gen_load_spr(t0, SPR_AMR);
1121     tcg_gen_andc_tl(t0, t0, t1);
1122 
1123     /* Or'in new bits and write it out */
1124     tcg_gen_or_tl(t0, t0, t2);
1125     gen_store_spr(SPR_AMR, t0);
1126     spr_store_dump_spr(SPR_AMR);
1127 }
1128 
1129 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
1130 {
1131     TCGv t0 = tcg_temp_new();
1132     TCGv t1 = tcg_temp_new();
1133     TCGv t2 = tcg_temp_new();
1134 
1135     /*
1136      * Note, the HV=1 case is handled earlier by simply using
1137      * spr_write_generic for HV mode in the SPR table
1138      */
1139 
1140     /* Build insertion mask into t1 based on context */
1141     gen_load_spr(t1, SPR_AMOR);
1142 
1143     /* Mask new bits into t2 */
1144     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1145 
1146     /* Load AMR and clear new bits in t0 */
1147     gen_load_spr(t0, SPR_UAMOR);
1148     tcg_gen_andc_tl(t0, t0, t1);
1149 
1150     /* Or'in new bits and write it out */
1151     tcg_gen_or_tl(t0, t0, t2);
1152     gen_store_spr(SPR_UAMOR, t0);
1153     spr_store_dump_spr(SPR_UAMOR);
1154 }
1155 
1156 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1157 {
1158     TCGv t0 = tcg_temp_new();
1159     TCGv t1 = tcg_temp_new();
1160     TCGv t2 = tcg_temp_new();
1161 
1162     /*
1163      * Note, the HV=1 case is handled earlier by simply using
1164      * spr_write_generic for HV mode in the SPR table
1165      */
1166 
1167     /* Build insertion mask into t1 based on context */
1168     gen_load_spr(t1, SPR_AMOR);
1169 
1170     /* Mask new bits into t2 */
1171     tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1172 
1173     /* Load AMR and clear new bits in t0 */
1174     gen_load_spr(t0, SPR_IAMR);
1175     tcg_gen_andc_tl(t0, t0, t1);
1176 
1177     /* Or'in new bits and write it out */
1178     tcg_gen_or_tl(t0, t0, t2);
1179     gen_store_spr(SPR_IAMR, t0);
1180     spr_store_dump_spr(SPR_IAMR);
1181 }
1182 #endif
1183 #endif
1184 
1185 #ifndef CONFIG_USER_ONLY
1186 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1187 {
1188     gen_helper_fixup_thrm(tcg_env);
1189     gen_load_spr(cpu_gpr[gprn], sprn);
1190     spr_load_dump_spr(sprn);
1191 }
1192 #endif /* !CONFIG_USER_ONLY */
1193 
1194 #if !defined(CONFIG_USER_ONLY)
1195 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1196 {
1197     TCGv t0 = tcg_temp_new();
1198 
1199     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1200     gen_store_spr(sprn, t0);
1201 }
1202 
1203 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1204 {
1205     TCGv t0 = tcg_temp_new();
1206 
1207     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1208     gen_store_spr(sprn, t0);
1209 }
1210 
1211 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1212 {
1213     TCGv t0 = tcg_temp_new();
1214 
1215     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1216                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1217     gen_store_spr(sprn, t0);
1218 }
1219 
1220 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1221 {
1222     gen_helper_booke206_tlbflush(tcg_env, cpu_gpr[gprn]);
1223 }
1224 
1225 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1226 {
1227     TCGv_i32 t0 = tcg_constant_i32(sprn);
1228     gen_helper_booke_setpid(tcg_env, t0, cpu_gpr[gprn]);
1229 }
1230 
1231 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1232 {
1233     gen_helper_booke_set_eplc(tcg_env, cpu_gpr[gprn]);
1234 }
1235 
1236 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1237 {
1238     gen_helper_booke_set_epsc(tcg_env, cpu_gpr[gprn]);
1239 }
1240 
1241 #endif
1242 
1243 #if !defined(CONFIG_USER_ONLY)
1244 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1245 {
1246     TCGv val = tcg_temp_new();
1247     tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1248     gen_store_spr(SPR_BOOKE_MAS3, val);
1249     tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1250     gen_store_spr(SPR_BOOKE_MAS7, val);
1251 }
1252 
1253 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1254 {
1255     TCGv mas7 = tcg_temp_new();
1256     TCGv mas3 = tcg_temp_new();
1257     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1258     tcg_gen_shli_tl(mas7, mas7, 32);
1259     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1260     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1261 }
1262 
1263 #endif
1264 
1265 #ifdef TARGET_PPC64
1266 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1267                                     int bit, int sprn, int cause)
1268 {
1269     TCGv_i32 t1 = tcg_constant_i32(bit);
1270     TCGv_i32 t2 = tcg_constant_i32(sprn);
1271     TCGv_i32 t3 = tcg_constant_i32(cause);
1272 
1273     gen_helper_fscr_facility_check(tcg_env, t1, t2, t3);
1274 }
1275 
1276 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1277                                    int bit, int sprn, int cause)
1278 {
1279     TCGv_i32 t1 = tcg_constant_i32(bit);
1280     TCGv_i32 t2 = tcg_constant_i32(sprn);
1281     TCGv_i32 t3 = tcg_constant_i32(cause);
1282 
1283     gen_helper_msr_facility_check(tcg_env, t1, t2, t3);
1284 }
1285 
1286 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1287 {
1288     TCGv spr_up = tcg_temp_new();
1289     TCGv spr = tcg_temp_new();
1290 
1291     gen_load_spr(spr, sprn - 1);
1292     tcg_gen_shri_tl(spr_up, spr, 32);
1293     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1294 }
1295 
1296 void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1297 {
1298     TCGv spr = tcg_temp_new();
1299 
1300     gen_load_spr(spr, sprn - 1);
1301     tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1302     gen_store_spr(sprn - 1, spr);
1303 }
1304 
1305 #if !defined(CONFIG_USER_ONLY)
1306 void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1307 {
1308     TCGv hmer = tcg_temp_new();
1309 
1310     gen_load_spr(hmer, sprn);
1311     tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1312     gen_store_spr(sprn, hmer);
1313     spr_store_dump_spr(sprn);
1314 }
1315 
1316 void spr_read_tfmr(DisasContext *ctx, int gprn, int sprn)
1317 {
1318     /* Reading TFMR can cause it to be updated, so serialize threads here too */
1319     if (!gen_serialize_core(ctx)) {
1320         return;
1321     }
1322     gen_helper_load_tfmr(cpu_gpr[gprn], tcg_env);
1323 }
1324 
1325 void spr_write_tfmr(DisasContext *ctx, int sprn, int gprn)
1326 {
1327     if (!gen_serialize_core(ctx)) {
1328         return;
1329     }
1330     gen_helper_store_tfmr(tcg_env, cpu_gpr[gprn]);
1331 }
1332 
1333 void spr_write_sprc(DisasContext *ctx, int sprn, int gprn)
1334 {
1335     gen_helper_store_sprc(tcg_env, cpu_gpr[gprn]);
1336 }
1337 
1338 void spr_read_sprd(DisasContext *ctx, int gprn, int sprn)
1339 {
1340     gen_helper_load_sprd(cpu_gpr[gprn], tcg_env);
1341 }
1342 
1343 void spr_write_sprd(DisasContext *ctx, int sprn, int gprn)
1344 {
1345     if (!gen_serialize_core(ctx)) {
1346         return;
1347     }
1348     gen_helper_store_sprd(tcg_env, cpu_gpr[gprn]);
1349 }
1350 
1351 void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1352 {
1353     translator_io_start(&ctx->base);
1354     gen_helper_store_lpcr(tcg_env, cpu_gpr[gprn]);
1355 }
1356 
1357 void spr_read_pmsr(DisasContext *ctx, int gprn, int sprn)
1358 {
1359     translator_io_start(&ctx->base);
1360     gen_helper_load_pmsr(cpu_gpr[gprn], tcg_env);
1361 }
1362 
1363 void spr_write_pmcr(DisasContext *ctx, int sprn, int gprn)
1364 {
1365     if (!gen_serialize_core_lpar(ctx)) {
1366         return;
1367     }
1368     translator_io_start(&ctx->base);
1369     gen_helper_store_pmcr(tcg_env, cpu_gpr[gprn]);
1370 }
1371 
1372 #endif /* !defined(CONFIG_USER_ONLY) */
1373 
1374 void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1375 {
1376     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1377     spr_read_generic(ctx, gprn, sprn);
1378 }
1379 
1380 void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1381 {
1382     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1383     spr_write_generic(ctx, sprn, gprn);
1384 }
1385 
1386 void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1387 {
1388     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1389     spr_read_generic(ctx, gprn, sprn);
1390 }
1391 
1392 void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1393 {
1394     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1395     spr_write_generic(ctx, sprn, gprn);
1396 }
1397 
1398 void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1399 {
1400     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1401     spr_read_prev_upper32(ctx, gprn, sprn);
1402 }
1403 
1404 void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1405 {
1406     gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1407     spr_write_prev_upper32(ctx, sprn, gprn);
1408 }
1409 
1410 void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1411 {
1412     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1413     spr_read_generic(ctx, gprn, sprn);
1414 }
1415 
1416 void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1417 {
1418     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1419     spr_write_generic(ctx, sprn, gprn);
1420 }
1421 
1422 void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1423 {
1424     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1425     spr_read_prev_upper32(ctx, gprn, sprn);
1426 }
1427 
1428 void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1429 {
1430     gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1431     spr_write_prev_upper32(ctx, sprn, gprn);
1432 }
1433 
1434 void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1435 {
1436     TCGv t0 = tcg_temp_new();
1437 
1438     /*
1439      * Access to the (H)DEXCR in problem state is done using separated
1440      * SPR indexes which are 16 below the SPR indexes which have full
1441      * access to the (H)DEXCR in privileged state. Problem state can
1442      * only read bits 32:63, bits 0:31 return 0.
1443      *
1444      * See section 9.3.1-9.3.2 of PowerISA v3.1B
1445      */
1446 
1447     gen_load_spr(t0, sprn + 16);
1448     tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1449 }
1450 
1451 /* The PPR32 SPR accesses the upper 32-bits of PPR */
1452 void spr_read_ppr32(DisasContext *ctx, int gprn, int sprn)
1453 {
1454     gen_load_spr(cpu_gpr[gprn], SPR_PPR);
1455     tcg_gen_shri_tl(cpu_gpr[gprn], cpu_gpr[gprn], 32);
1456     spr_load_dump_spr(SPR_PPR);
1457 }
1458 
1459 void spr_write_ppr32(DisasContext *ctx, int sprn, int gprn)
1460 {
1461     TCGv t0 = tcg_temp_new();
1462 
1463     /*
1464      * Don't clobber the low 32-bits of the PPR. These are all reserved bits
1465      * but TCG does implement them, so it would be surprising to zero them
1466      * here. "Priority nops" are similarly careful not to clobber reserved
1467      * bits.
1468      */
1469     gen_load_spr(t0, SPR_PPR);
1470     tcg_gen_deposit_tl(t0, t0, cpu_gpr[gprn], 32, 32);
1471     gen_store_spr(SPR_PPR, t0);
1472     spr_store_dump_spr(SPR_PPR);
1473 }
1474 #endif
1475 
1476 #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1477 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1478 
1479 #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1480 GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1481 
1482 #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1483 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1484 
1485 #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1486 GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1487 
1488 #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1489 GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1490 
1491 #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1492 GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1493 
1494 typedef struct opcode_t {
1495     unsigned char opc1, opc2, opc3, opc4;
1496 #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1497     unsigned char pad[4];
1498 #endif
1499     opc_handler_t handler;
1500     const char *oname;
1501 } opcode_t;
1502 
1503 static void gen_priv_opc(DisasContext *ctx)
1504 {
1505     gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1506 }
1507 
1508 /* Helpers for priv. check */
1509 #define GEN_PRIV(CTX)              \
1510     do {                           \
1511         gen_priv_opc(CTX); return; \
1512     } while (0)
1513 
1514 #if defined(CONFIG_USER_ONLY)
1515 #define CHK_HV(CTX) GEN_PRIV(CTX)
1516 #define CHK_SV(CTX) GEN_PRIV(CTX)
1517 #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1518 #else
1519 #define CHK_HV(CTX)                         \
1520     do {                                    \
1521         if (unlikely(ctx->pr || !ctx->hv)) {\
1522             GEN_PRIV(CTX);                  \
1523         }                                   \
1524     } while (0)
1525 #define CHK_SV(CTX)              \
1526     do {                         \
1527         if (unlikely(ctx->pr)) { \
1528             GEN_PRIV(CTX);       \
1529         }                        \
1530     } while (0)
1531 #define CHK_HVRM(CTX)                                   \
1532     do {                                                \
1533         if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1534             GEN_PRIV(CTX);                              \
1535         }                                               \
1536     } while (0)
1537 #endif
1538 
1539 #define CHK_NONE(CTX)
1540 
1541 /*****************************************************************************/
1542 /* PowerPC instructions table                                                */
1543 
1544 #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1545 {                                                                             \
1546     .opc1 = op1,                                                              \
1547     .opc2 = op2,                                                              \
1548     .opc3 = op3,                                                              \
1549     .opc4 = 0xff,                                                             \
1550     .handler = {                                                              \
1551         .inval1  = invl,                                                      \
1552         .type = _typ,                                                         \
1553         .type2 = _typ2,                                                       \
1554         .handler = &gen_##name,                                               \
1555     },                                                                        \
1556     .oname = stringify(name),                                                 \
1557 }
1558 #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1559 {                                                                             \
1560     .opc1 = op1,                                                              \
1561     .opc2 = op2,                                                              \
1562     .opc3 = op3,                                                              \
1563     .opc4 = 0xff,                                                             \
1564     .handler = {                                                              \
1565         .inval1  = invl1,                                                     \
1566         .inval2  = invl2,                                                     \
1567         .type = _typ,                                                         \
1568         .type2 = _typ2,                                                       \
1569         .handler = &gen_##name,                                               \
1570     },                                                                        \
1571     .oname = stringify(name),                                                 \
1572 }
1573 #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1574 {                                                                             \
1575     .opc1 = op1,                                                              \
1576     .opc2 = op2,                                                              \
1577     .opc3 = op3,                                                              \
1578     .opc4 = 0xff,                                                             \
1579     .handler = {                                                              \
1580         .inval1  = invl,                                                      \
1581         .type = _typ,                                                         \
1582         .type2 = _typ2,                                                       \
1583         .handler = &gen_##name,                                               \
1584     },                                                                        \
1585     .oname = onam,                                                            \
1586 }
1587 #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1588 {                                                                             \
1589     .opc1 = op1,                                                              \
1590     .opc2 = op2,                                                              \
1591     .opc3 = op3,                                                              \
1592     .opc4 = op4,                                                              \
1593     .handler = {                                                              \
1594         .inval1  = invl,                                                      \
1595         .type = _typ,                                                         \
1596         .type2 = _typ2,                                                       \
1597         .handler = &gen_##name,                                               \
1598     },                                                                        \
1599     .oname = stringify(name),                                                 \
1600 }
1601 #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1602 {                                                                             \
1603     .opc1 = op1,                                                              \
1604     .opc2 = op2,                                                              \
1605     .opc3 = op3,                                                              \
1606     .opc4 = op4,                                                              \
1607     .handler = {                                                              \
1608         .inval1  = invl,                                                      \
1609         .type = _typ,                                                         \
1610         .type2 = _typ2,                                                       \
1611         .handler = &gen_##name,                                               \
1612     },                                                                        \
1613     .oname = onam,                                                            \
1614 }
1615 
1616 /* Invalid instruction */
1617 static void gen_invalid(DisasContext *ctx)
1618 {
1619     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1620 }
1621 
1622 static opc_handler_t invalid_handler = {
1623     .inval1  = 0xFFFFFFFF,
1624     .inval2  = 0xFFFFFFFF,
1625     .type    = PPC_NONE,
1626     .type2   = PPC_NONE,
1627     .handler = gen_invalid,
1628 };
1629 
1630 /***                           Integer comparison                          ***/
1631 
1632 static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1633 {
1634     TCGv t0 = tcg_temp_new();
1635     TCGv_i32 t = tcg_temp_new_i32();
1636 
1637     tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1638                        t0, arg0, arg1,
1639                        tcg_constant_tl(CRF_LT), tcg_constant_tl(CRF_EQ));
1640     tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1641                        t0, arg0, arg1, tcg_constant_tl(CRF_GT), t0);
1642 
1643     tcg_gen_trunc_tl_i32(t, t0);
1644     tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1645     tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1646 }
1647 
1648 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1649 {
1650     TCGv t0 = tcg_constant_tl(arg1);
1651     gen_op_cmp(arg0, t0, s, crf);
1652 }
1653 
1654 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1655 {
1656     TCGv t0, t1;
1657     t0 = tcg_temp_new();
1658     t1 = tcg_temp_new();
1659     if (s) {
1660         tcg_gen_ext32s_tl(t0, arg0);
1661         tcg_gen_ext32s_tl(t1, arg1);
1662     } else {
1663         tcg_gen_ext32u_tl(t0, arg0);
1664         tcg_gen_ext32u_tl(t1, arg1);
1665     }
1666     gen_op_cmp(t0, t1, s, crf);
1667 }
1668 
1669 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1670 {
1671     TCGv t0 = tcg_constant_tl(arg1);
1672     gen_op_cmp32(arg0, t0, s, crf);
1673 }
1674 
1675 static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1676 {
1677     if (NARROW_MODE(ctx)) {
1678         gen_op_cmpi32(reg, 0, 1, 0);
1679     } else {
1680         gen_op_cmpi(reg, 0, 1, 0);
1681     }
1682 }
1683 
1684 /***                           Integer arithmetic                          ***/
1685 
1686 static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1687                                            TCGv arg1, TCGv arg2, int sub)
1688 {
1689     TCGv t0 = tcg_temp_new();
1690 
1691     tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1692     tcg_gen_xor_tl(t0, arg1, arg2);
1693     if (sub) {
1694         tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1695     } else {
1696         tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1697     }
1698     if (NARROW_MODE(ctx)) {
1699         tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1700         if (is_isa300(ctx)) {
1701             tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1702         }
1703     } else {
1704         if (is_isa300(ctx)) {
1705             tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1706         }
1707         tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1708     }
1709     tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1710 }
1711 
1712 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1713                                              TCGv res, TCGv arg0, TCGv arg1,
1714                                              TCGv ca32, int sub)
1715 {
1716     TCGv t0;
1717 
1718     if (!is_isa300(ctx)) {
1719         return;
1720     }
1721 
1722     t0 = tcg_temp_new();
1723     if (sub) {
1724         tcg_gen_eqv_tl(t0, arg0, arg1);
1725     } else {
1726         tcg_gen_xor_tl(t0, arg0, arg1);
1727     }
1728     tcg_gen_xor_tl(t0, t0, res);
1729     tcg_gen_extract_tl(ca32, t0, 32, 1);
1730 }
1731 
1732 /* Common add function */
1733 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1734                                     TCGv arg2, TCGv ca, TCGv ca32,
1735                                     bool add_ca, bool compute_ca,
1736                                     bool compute_ov, bool compute_rc0)
1737 {
1738     TCGv t0 = ret;
1739 
1740     if (compute_ca || compute_ov) {
1741         t0 = tcg_temp_new();
1742     }
1743 
1744     if (compute_ca) {
1745         if (NARROW_MODE(ctx)) {
1746             /*
1747              * Caution: a non-obvious corner case of the spec is that
1748              * we must produce the *entire* 64-bit addition, but
1749              * produce the carry into bit 32.
1750              */
1751             TCGv t1 = tcg_temp_new();
1752             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1753             tcg_gen_add_tl(t0, arg1, arg2);
1754             if (add_ca) {
1755                 tcg_gen_add_tl(t0, t0, ca);
1756             }
1757             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1758             tcg_gen_extract_tl(ca, ca, 32, 1);
1759             if (is_isa300(ctx)) {
1760                 tcg_gen_mov_tl(ca32, ca);
1761             }
1762         } else {
1763             if (add_ca) {
1764                 tcg_gen_addcio_tl(t0, ca, arg1, arg2, ca);
1765             } else {
1766                 TCGv zero = tcg_constant_tl(0);
1767                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1768             }
1769             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1770         }
1771     } else {
1772         tcg_gen_add_tl(t0, arg1, arg2);
1773         if (add_ca) {
1774             tcg_gen_add_tl(t0, t0, ca);
1775         }
1776     }
1777 
1778     if (compute_ov) {
1779         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1780     }
1781     if (unlikely(compute_rc0)) {
1782         gen_set_Rc0(ctx, t0);
1783     }
1784 
1785     if (t0 != ret) {
1786         tcg_gen_mov_tl(ret, t0);
1787     }
1788 }
1789 
1790 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret,
1791                                      TCGv arg1, TCGv arg2, bool sign,
1792                                      bool compute_ov, bool compute_rc0)
1793 {
1794     TCGv_i32 t0 = tcg_temp_new_i32();
1795     TCGv_i32 t1 = tcg_temp_new_i32();
1796     TCGv_i32 t2 = tcg_temp_new_i32();
1797     TCGv_i32 t3 = tcg_temp_new_i32();
1798 
1799     tcg_gen_trunc_tl_i32(t0, arg1);
1800     tcg_gen_trunc_tl_i32(t1, arg2);
1801     if (sign) {
1802         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1803         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1804         tcg_gen_and_i32(t2, t2, t3);
1805         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1806         tcg_gen_or_i32(t2, t2, t3);
1807         tcg_gen_movi_i32(t3, 0);
1808         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1809         tcg_gen_div_i32(t3, t0, t1);
1810         tcg_gen_extu_i32_tl(ret, t3);
1811     } else {
1812         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1813         tcg_gen_movi_i32(t3, 0);
1814         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1815         tcg_gen_divu_i32(t3, t0, t1);
1816         tcg_gen_extu_i32_tl(ret, t3);
1817     }
1818     if (compute_ov) {
1819         tcg_gen_extu_i32_tl(cpu_ov, t2);
1820         if (is_isa300(ctx)) {
1821             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1822         }
1823         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1824     }
1825 
1826     if (unlikely(compute_rc0)) {
1827         gen_set_Rc0(ctx, ret);
1828     }
1829 }
1830 
1831 #if defined(TARGET_PPC64)
1832 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret,
1833                                      TCGv arg1, TCGv arg2, bool sign,
1834                                      bool compute_ov, bool compute_rc0)
1835 {
1836     TCGv_i64 t0 = tcg_temp_new_i64();
1837     TCGv_i64 t1 = tcg_temp_new_i64();
1838     TCGv_i64 t2 = tcg_temp_new_i64();
1839     TCGv_i64 t3 = tcg_temp_new_i64();
1840 
1841     tcg_gen_mov_i64(t0, arg1);
1842     tcg_gen_mov_i64(t1, arg2);
1843     if (sign) {
1844         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1845         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1846         tcg_gen_and_i64(t2, t2, t3);
1847         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1848         tcg_gen_or_i64(t2, t2, t3);
1849         tcg_gen_movi_i64(t3, 0);
1850         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1851         tcg_gen_div_i64(ret, t0, t1);
1852     } else {
1853         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1854         tcg_gen_movi_i64(t3, 0);
1855         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1856         tcg_gen_divu_i64(ret, t0, t1);
1857     }
1858     if (compute_ov) {
1859         tcg_gen_mov_tl(cpu_ov, t2);
1860         if (is_isa300(ctx)) {
1861             tcg_gen_mov_tl(cpu_ov32, t2);
1862         }
1863         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1864     }
1865 
1866     if (unlikely(compute_rc0)) {
1867         gen_set_Rc0(ctx, ret);
1868     }
1869 }
1870 #endif
1871 
1872 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1873                                      TCGv arg2, int sign)
1874 {
1875     TCGv_i32 t0 = tcg_temp_new_i32();
1876     TCGv_i32 t1 = tcg_temp_new_i32();
1877 
1878     tcg_gen_trunc_tl_i32(t0, arg1);
1879     tcg_gen_trunc_tl_i32(t1, arg2);
1880     if (sign) {
1881         TCGv_i32 t2 = tcg_temp_new_i32();
1882         TCGv_i32 t3 = tcg_temp_new_i32();
1883         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1884         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1885         tcg_gen_and_i32(t2, t2, t3);
1886         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1887         tcg_gen_or_i32(t2, t2, t3);
1888         tcg_gen_movi_i32(t3, 0);
1889         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1890         tcg_gen_rem_i32(t3, t0, t1);
1891         tcg_gen_ext_i32_tl(ret, t3);
1892     } else {
1893         TCGv_i32 t2 = tcg_constant_i32(1);
1894         TCGv_i32 t3 = tcg_constant_i32(0);
1895         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1896         tcg_gen_remu_i32(t0, t0, t1);
1897         tcg_gen_extu_i32_tl(ret, t0);
1898     }
1899 }
1900 
1901 #if defined(TARGET_PPC64)
1902 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1903                                      TCGv arg2, int sign)
1904 {
1905     TCGv_i64 t0 = tcg_temp_new_i64();
1906     TCGv_i64 t1 = tcg_temp_new_i64();
1907 
1908     tcg_gen_mov_i64(t0, arg1);
1909     tcg_gen_mov_i64(t1, arg2);
1910     if (sign) {
1911         TCGv_i64 t2 = tcg_temp_new_i64();
1912         TCGv_i64 t3 = tcg_temp_new_i64();
1913         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1914         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1915         tcg_gen_and_i64(t2, t2, t3);
1916         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1917         tcg_gen_or_i64(t2, t2, t3);
1918         tcg_gen_movi_i64(t3, 0);
1919         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1920         tcg_gen_rem_i64(ret, t0, t1);
1921     } else {
1922         TCGv_i64 t2 = tcg_constant_i64(1);
1923         TCGv_i64 t3 = tcg_constant_i64(0);
1924         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1925         tcg_gen_remu_i64(ret, t0, t1);
1926     }
1927 }
1928 #endif
1929 
1930 /* Common subf function */
1931 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1932                                      TCGv arg2, bool add_ca, bool compute_ca,
1933                                      bool compute_ov, bool compute_rc0)
1934 {
1935     TCGv t0 = ret;
1936 
1937     if (compute_ca || compute_ov) {
1938         t0 = tcg_temp_new();
1939     }
1940 
1941     if (compute_ca) {
1942         /* dest = ~arg1 + arg2 [+ ca].  */
1943         if (NARROW_MODE(ctx)) {
1944             /*
1945              * Caution: a non-obvious corner case of the spec is that
1946              * we must produce the *entire* 64-bit addition, but
1947              * produce the carry into bit 32.
1948              */
1949             TCGv inv1 = tcg_temp_new();
1950             TCGv t1 = tcg_temp_new();
1951             tcg_gen_not_tl(inv1, arg1);
1952             if (add_ca) {
1953                 tcg_gen_add_tl(t0, arg2, cpu_ca);
1954             } else {
1955                 tcg_gen_addi_tl(t0, arg2, 1);
1956             }
1957             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
1958             tcg_gen_add_tl(t0, t0, inv1);
1959             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changes w/ carry */
1960             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
1961             if (is_isa300(ctx)) {
1962                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
1963             }
1964         } else if (add_ca) {
1965             TCGv inv1 = tcg_temp_new();
1966             tcg_gen_not_tl(inv1, arg1);
1967             tcg_gen_addcio_tl(t0, cpu_ca, arg2, inv1, cpu_ca);
1968             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
1969         } else {
1970             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1971             tcg_gen_sub_tl(t0, arg2, arg1);
1972             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
1973         }
1974     } else if (add_ca) {
1975         /*
1976          * Since we're ignoring carry-out, we can simplify the
1977          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
1978          */
1979         tcg_gen_sub_tl(t0, arg2, arg1);
1980         tcg_gen_add_tl(t0, t0, cpu_ca);
1981         tcg_gen_subi_tl(t0, t0, 1);
1982     } else {
1983         tcg_gen_sub_tl(t0, arg2, arg1);
1984     }
1985 
1986     if (compute_ov) {
1987         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1988     }
1989     if (unlikely(compute_rc0)) {
1990         gen_set_Rc0(ctx, t0);
1991     }
1992 
1993     if (t0 != ret) {
1994         tcg_gen_mov_tl(ret, t0);
1995     }
1996 }
1997 
1998 /***                            Integer logical                            ***/
1999 
2000 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
2001 static void gen_pause(DisasContext *ctx)
2002 {
2003     TCGv_i32 t0 = tcg_constant_i32(0);
2004     tcg_gen_st_i32(t0, tcg_env,
2005                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
2006 
2007     /* Stop translation, this gives other CPUs a chance to run */
2008     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
2009 }
2010 #endif /* defined(TARGET_PPC64) */
2011 
2012 /***                             Integer rotate                            ***/
2013 
2014 /* rlwimi & rlwimi. */
2015 static void gen_rlwimi(DisasContext *ctx)
2016 {
2017     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2018     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2019     uint32_t sh = SH(ctx->opcode);
2020     uint32_t mb = MB(ctx->opcode);
2021     uint32_t me = ME(ctx->opcode);
2022 
2023     if (sh == (31 - me) && mb <= me) {
2024         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2025     } else {
2026         target_ulong mask;
2027         bool mask_in_32b = true;
2028         TCGv t1;
2029 
2030 #if defined(TARGET_PPC64)
2031         mb += 32;
2032         me += 32;
2033 #endif
2034         mask = MASK(mb, me);
2035 
2036 #if defined(TARGET_PPC64)
2037         if (mask > 0xffffffffu) {
2038             mask_in_32b = false;
2039         }
2040 #endif
2041         t1 = tcg_temp_new();
2042         if (mask_in_32b) {
2043             TCGv_i32 t0 = tcg_temp_new_i32();
2044             tcg_gen_trunc_tl_i32(t0, t_rs);
2045             tcg_gen_rotli_i32(t0, t0, sh);
2046             tcg_gen_extu_i32_tl(t1, t0);
2047         } else {
2048 #if defined(TARGET_PPC64)
2049             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2050             tcg_gen_rotli_i64(t1, t1, sh);
2051 #else
2052             g_assert_not_reached();
2053 #endif
2054         }
2055 
2056         tcg_gen_andi_tl(t1, t1, mask);
2057         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2058         tcg_gen_or_tl(t_ra, t_ra, t1);
2059     }
2060     if (unlikely(Rc(ctx->opcode) != 0)) {
2061         gen_set_Rc0(ctx, t_ra);
2062     }
2063 }
2064 
2065 /* rlwinm & rlwinm. */
2066 static void gen_rlwinm(DisasContext *ctx)
2067 {
2068     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2069     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2070     int sh = SH(ctx->opcode);
2071     int mb = MB(ctx->opcode);
2072     int me = ME(ctx->opcode);
2073     int len = me - mb + 1;
2074     int rsh = (32 - sh) & 31;
2075 
2076     if (sh != 0 && len > 0 && me == (31 - sh)) {
2077         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2078     } else if (me == 31 && rsh + len <= 32) {
2079         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2080     } else {
2081         target_ulong mask;
2082         bool mask_in_32b = true;
2083 #if defined(TARGET_PPC64)
2084         mb += 32;
2085         me += 32;
2086 #endif
2087         mask = MASK(mb, me);
2088 #if defined(TARGET_PPC64)
2089         if (mask > 0xffffffffu) {
2090             mask_in_32b = false;
2091         }
2092 #endif
2093         if (mask_in_32b) {
2094             if (sh == 0) {
2095                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2096             } else {
2097                 TCGv_i32 t0 = tcg_temp_new_i32();
2098                 tcg_gen_trunc_tl_i32(t0, t_rs);
2099                 tcg_gen_rotli_i32(t0, t0, sh);
2100                 tcg_gen_andi_i32(t0, t0, mask);
2101                 tcg_gen_extu_i32_tl(t_ra, t0);
2102             }
2103         } else {
2104 #if defined(TARGET_PPC64)
2105             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2106             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2107             tcg_gen_andi_i64(t_ra, t_ra, mask);
2108 #else
2109             g_assert_not_reached();
2110 #endif
2111         }
2112     }
2113     if (unlikely(Rc(ctx->opcode) != 0)) {
2114         gen_set_Rc0(ctx, t_ra);
2115     }
2116 }
2117 
2118 /* rlwnm & rlwnm. */
2119 static void gen_rlwnm(DisasContext *ctx)
2120 {
2121     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2122     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2123     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2124     uint32_t mb = MB(ctx->opcode);
2125     uint32_t me = ME(ctx->opcode);
2126     target_ulong mask;
2127     bool mask_in_32b = true;
2128 
2129 #if defined(TARGET_PPC64)
2130     mb += 32;
2131     me += 32;
2132 #endif
2133     mask = MASK(mb, me);
2134 
2135 #if defined(TARGET_PPC64)
2136     if (mask > 0xffffffffu) {
2137         mask_in_32b = false;
2138     }
2139 #endif
2140     if (mask_in_32b) {
2141         TCGv_i32 t0 = tcg_temp_new_i32();
2142         TCGv_i32 t1 = tcg_temp_new_i32();
2143         tcg_gen_trunc_tl_i32(t0, t_rb);
2144         tcg_gen_trunc_tl_i32(t1, t_rs);
2145         tcg_gen_andi_i32(t0, t0, 0x1f);
2146         tcg_gen_rotl_i32(t1, t1, t0);
2147         tcg_gen_extu_i32_tl(t_ra, t1);
2148     } else {
2149 #if defined(TARGET_PPC64)
2150         TCGv_i64 t0 = tcg_temp_new_i64();
2151         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2152         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2153         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2154 #else
2155         g_assert_not_reached();
2156 #endif
2157     }
2158 
2159     tcg_gen_andi_tl(t_ra, t_ra, mask);
2160 
2161     if (unlikely(Rc(ctx->opcode) != 0)) {
2162         gen_set_Rc0(ctx, t_ra);
2163     }
2164 }
2165 
2166 #if defined(TARGET_PPC64)
2167 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2168 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2169 {                                                                             \
2170     gen_##name(ctx, 0);                                                       \
2171 }                                                                             \
2172                                                                               \
2173 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2174 {                                                                             \
2175     gen_##name(ctx, 1);                                                       \
2176 }
2177 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2178 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2179 {                                                                             \
2180     gen_##name(ctx, 0, 0);                                                    \
2181 }                                                                             \
2182                                                                               \
2183 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2184 {                                                                             \
2185     gen_##name(ctx, 0, 1);                                                    \
2186 }                                                                             \
2187                                                                               \
2188 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2189 {                                                                             \
2190     gen_##name(ctx, 1, 0);                                                    \
2191 }                                                                             \
2192                                                                               \
2193 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2194 {                                                                             \
2195     gen_##name(ctx, 1, 1);                                                    \
2196 }
2197 
2198 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2199 {
2200     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2201     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2202     int len = me - mb + 1;
2203     int rsh = (64 - sh) & 63;
2204 
2205     if (sh != 0 && len > 0 && me == (63 - sh)) {
2206         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2207     } else if (me == 63 && rsh + len <= 64) {
2208         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2209     } else {
2210         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2211         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2212     }
2213     if (unlikely(Rc(ctx->opcode) != 0)) {
2214         gen_set_Rc0(ctx, t_ra);
2215     }
2216 }
2217 
2218 /* rldicl - rldicl. */
2219 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2220 {
2221     uint32_t sh, mb;
2222 
2223     sh = SH(ctx->opcode) | (shn << 5);
2224     mb = MB(ctx->opcode) | (mbn << 5);
2225     gen_rldinm(ctx, mb, 63, sh);
2226 }
2227 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2228 
2229 /* rldicr - rldicr. */
2230 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2231 {
2232     uint32_t sh, me;
2233 
2234     sh = SH(ctx->opcode) | (shn << 5);
2235     me = MB(ctx->opcode) | (men << 5);
2236     gen_rldinm(ctx, 0, me, sh);
2237 }
2238 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2239 
2240 /* rldic - rldic. */
2241 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2242 {
2243     uint32_t sh, mb;
2244 
2245     sh = SH(ctx->opcode) | (shn << 5);
2246     mb = MB(ctx->opcode) | (mbn << 5);
2247     gen_rldinm(ctx, mb, 63 - sh, sh);
2248 }
2249 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2250 
2251 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2252 {
2253     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2254     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2255     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2256     TCGv t0;
2257 
2258     t0 = tcg_temp_new();
2259     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2260     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2261 
2262     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2263     if (unlikely(Rc(ctx->opcode) != 0)) {
2264         gen_set_Rc0(ctx, t_ra);
2265     }
2266 }
2267 
2268 /* rldcl - rldcl. */
2269 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2270 {
2271     uint32_t mb;
2272 
2273     mb = MB(ctx->opcode) | (mbn << 5);
2274     gen_rldnm(ctx, mb, 63);
2275 }
2276 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2277 
2278 /* rldcr - rldcr. */
2279 static inline void gen_rldcr(DisasContext *ctx, int men)
2280 {
2281     uint32_t me;
2282 
2283     me = MB(ctx->opcode) | (men << 5);
2284     gen_rldnm(ctx, 0, me);
2285 }
2286 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2287 
2288 /* rldimi - rldimi. */
2289 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2290 {
2291     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2292     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2293     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2294     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2295     uint32_t me = 63 - sh;
2296 
2297     if (mb <= me) {
2298         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2299     } else {
2300         target_ulong mask = MASK(mb, me);
2301         TCGv t1 = tcg_temp_new();
2302 
2303         tcg_gen_rotli_tl(t1, t_rs, sh);
2304         tcg_gen_andi_tl(t1, t1, mask);
2305         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2306         tcg_gen_or_tl(t_ra, t_ra, t1);
2307     }
2308     if (unlikely(Rc(ctx->opcode) != 0)) {
2309         gen_set_Rc0(ctx, t_ra);
2310     }
2311 }
2312 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2313 #endif
2314 
2315 /***                             Integer shift                             ***/
2316 
2317 /* slw & slw. */
2318 static void gen_slw(DisasContext *ctx)
2319 {
2320     TCGv t0, t1;
2321 
2322     t0 = tcg_temp_new();
2323     /* AND rS with a mask that is 0 when rB >= 0x20 */
2324 #if defined(TARGET_PPC64)
2325     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2326     tcg_gen_sari_tl(t0, t0, 0x3f);
2327 #else
2328     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2329     tcg_gen_sari_tl(t0, t0, 0x1f);
2330 #endif
2331     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2332     t1 = tcg_temp_new();
2333     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2334     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2335     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2336     if (unlikely(Rc(ctx->opcode) != 0)) {
2337         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2338     }
2339 }
2340 
2341 /* sraw & sraw. */
2342 static void gen_sraw(DisasContext *ctx)
2343 {
2344     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], tcg_env,
2345                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2346     if (unlikely(Rc(ctx->opcode) != 0)) {
2347         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2348     }
2349 }
2350 
2351 /* srawi & srawi. */
2352 static void gen_srawi(DisasContext *ctx)
2353 {
2354     int sh = SH(ctx->opcode);
2355     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2356     TCGv src = cpu_gpr[rS(ctx->opcode)];
2357     if (sh == 0) {
2358         tcg_gen_ext32s_tl(dst, src);
2359         tcg_gen_movi_tl(cpu_ca, 0);
2360         if (is_isa300(ctx)) {
2361             tcg_gen_movi_tl(cpu_ca32, 0);
2362         }
2363     } else {
2364         TCGv t0;
2365         tcg_gen_ext32s_tl(dst, src);
2366         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2367         t0 = tcg_temp_new();
2368         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2369         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2370         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2371         if (is_isa300(ctx)) {
2372             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2373         }
2374         tcg_gen_sari_tl(dst, dst, sh);
2375     }
2376     if (unlikely(Rc(ctx->opcode) != 0)) {
2377         gen_set_Rc0(ctx, dst);
2378     }
2379 }
2380 
2381 /* srw & srw. */
2382 static void gen_srw(DisasContext *ctx)
2383 {
2384     TCGv t0, t1;
2385 
2386     t0 = tcg_temp_new();
2387     /* AND rS with a mask that is 0 when rB >= 0x20 */
2388 #if defined(TARGET_PPC64)
2389     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2390     tcg_gen_sari_tl(t0, t0, 0x3f);
2391 #else
2392     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2393     tcg_gen_sari_tl(t0, t0, 0x1f);
2394 #endif
2395     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2396     tcg_gen_ext32u_tl(t0, t0);
2397     t1 = tcg_temp_new();
2398     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2399     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2400     if (unlikely(Rc(ctx->opcode) != 0)) {
2401         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2402     }
2403 }
2404 
2405 #if defined(TARGET_PPC64)
2406 /* sld & sld. */
2407 static void gen_sld(DisasContext *ctx)
2408 {
2409     TCGv t0, t1;
2410 
2411     t0 = tcg_temp_new();
2412     /* AND rS with a mask that is 0 when rB >= 0x40 */
2413     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2414     tcg_gen_sari_tl(t0, t0, 0x3f);
2415     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2416     t1 = tcg_temp_new();
2417     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2418     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2419     if (unlikely(Rc(ctx->opcode) != 0)) {
2420         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2421     }
2422 }
2423 
2424 /* srad & srad. */
2425 static void gen_srad(DisasContext *ctx)
2426 {
2427     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], tcg_env,
2428                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2429     if (unlikely(Rc(ctx->opcode) != 0)) {
2430         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2431     }
2432 }
2433 /* sradi & sradi. */
2434 static inline void gen_sradi(DisasContext *ctx, int n)
2435 {
2436     int sh = SH(ctx->opcode) + (n << 5);
2437     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2438     TCGv src = cpu_gpr[rS(ctx->opcode)];
2439     if (sh == 0) {
2440         tcg_gen_mov_tl(dst, src);
2441         tcg_gen_movi_tl(cpu_ca, 0);
2442         if (is_isa300(ctx)) {
2443             tcg_gen_movi_tl(cpu_ca32, 0);
2444         }
2445     } else {
2446         TCGv t0;
2447         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2448         t0 = tcg_temp_new();
2449         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2450         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2451         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2452         if (is_isa300(ctx)) {
2453             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2454         }
2455         tcg_gen_sari_tl(dst, src, sh);
2456     }
2457     if (unlikely(Rc(ctx->opcode) != 0)) {
2458         gen_set_Rc0(ctx, dst);
2459     }
2460 }
2461 
2462 static void gen_sradi0(DisasContext *ctx)
2463 {
2464     gen_sradi(ctx, 0);
2465 }
2466 
2467 static void gen_sradi1(DisasContext *ctx)
2468 {
2469     gen_sradi(ctx, 1);
2470 }
2471 
2472 /* extswsli & extswsli. */
2473 static inline void gen_extswsli(DisasContext *ctx, int n)
2474 {
2475     int sh = SH(ctx->opcode) + (n << 5);
2476     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2477     TCGv src = cpu_gpr[rS(ctx->opcode)];
2478 
2479     tcg_gen_ext32s_tl(dst, src);
2480     tcg_gen_shli_tl(dst, dst, sh);
2481     if (unlikely(Rc(ctx->opcode) != 0)) {
2482         gen_set_Rc0(ctx, dst);
2483     }
2484 }
2485 
2486 static void gen_extswsli0(DisasContext *ctx)
2487 {
2488     gen_extswsli(ctx, 0);
2489 }
2490 
2491 static void gen_extswsli1(DisasContext *ctx)
2492 {
2493     gen_extswsli(ctx, 1);
2494 }
2495 
2496 /* srd & srd. */
2497 static void gen_srd(DisasContext *ctx)
2498 {
2499     TCGv t0, t1;
2500 
2501     t0 = tcg_temp_new();
2502     /* AND rS with a mask that is 0 when rB >= 0x40 */
2503     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2504     tcg_gen_sari_tl(t0, t0, 0x3f);
2505     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2506     t1 = tcg_temp_new();
2507     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2508     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2509     if (unlikely(Rc(ctx->opcode) != 0)) {
2510         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2511     }
2512 }
2513 #endif
2514 
2515 /***                           Addressing modes                            ***/
2516 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2517 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2518                                       target_long maskl)
2519 {
2520     target_long simm = SIMM(ctx->opcode);
2521 
2522     simm &= ~maskl;
2523     if (rA(ctx->opcode) == 0) {
2524         if (NARROW_MODE(ctx)) {
2525             simm = (uint32_t)simm;
2526         }
2527         tcg_gen_movi_tl(EA, simm);
2528     } else if (likely(simm != 0)) {
2529         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2530         if (NARROW_MODE(ctx)) {
2531             tcg_gen_ext32u_tl(EA, EA);
2532         }
2533     } else {
2534         if (NARROW_MODE(ctx)) {
2535             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2536         } else {
2537             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2538         }
2539     }
2540 }
2541 
2542 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2543 {
2544     if (rA(ctx->opcode) == 0) {
2545         if (NARROW_MODE(ctx)) {
2546             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2547         } else {
2548             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2549         }
2550     } else {
2551         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2552         if (NARROW_MODE(ctx)) {
2553             tcg_gen_ext32u_tl(EA, EA);
2554         }
2555     }
2556 }
2557 
2558 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2559 {
2560     if (rA(ctx->opcode) == 0) {
2561         tcg_gen_movi_tl(EA, 0);
2562     } else if (NARROW_MODE(ctx)) {
2563         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2564     } else {
2565         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2566     }
2567 }
2568 
2569 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2570                                 target_long val)
2571 {
2572     tcg_gen_addi_tl(ret, arg1, val);
2573     if (NARROW_MODE(ctx)) {
2574         tcg_gen_ext32u_tl(ret, ret);
2575     }
2576 }
2577 
2578 static inline void gen_align_no_le(DisasContext *ctx)
2579 {
2580     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
2581                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
2582 }
2583 
2584 /* EA <- {(ra == 0) ? 0 : GPR[ra]} + displ */
2585 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
2586 {
2587     TCGv ea = tcg_temp_new();
2588     if (ra) {
2589         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
2590     } else {
2591         tcg_gen_mov_tl(ea, displ);
2592     }
2593     if (NARROW_MODE(ctx)) {
2594         tcg_gen_ext32u_tl(ea, ea);
2595     }
2596     return ea;
2597 }
2598 
2599 #if defined(TARGET_PPC64)
2600 /* EA <- (ra == 0) ? 0 : GPR[ra] */
2601 static TCGv do_ea_calc_ra(DisasContext *ctx, int ra)
2602 {
2603     TCGv EA = tcg_temp_new();
2604     if (!ra) {
2605         tcg_gen_movi_tl(EA, 0);
2606     } else if (NARROW_MODE(ctx)) {
2607         tcg_gen_ext32u_tl(EA, cpu_gpr[ra]);
2608     } else {
2609         tcg_gen_mov_tl(EA, cpu_gpr[ra]);
2610     }
2611     return EA;
2612 }
2613 #endif
2614 
2615 /***                             Integer load                              ***/
2616 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
2617 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
2618 
2619 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
2620 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
2621                                   TCGv val,                             \
2622                                   TCGv addr)                            \
2623 {                                                                       \
2624     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
2625 }
2626 
2627 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
2628 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
2629 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
2630 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
2631 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
2632 
2633 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
2634 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
2635 
2636 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
2637 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
2638                                              TCGv_i64 val,          \
2639                                              TCGv addr)             \
2640 {                                                                   \
2641     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
2642 }
2643 
2644 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
2645 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
2646 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
2647 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
2648 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
2649 
2650 #if defined(TARGET_PPC64)
2651 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
2652 #endif
2653 
2654 #define GEN_QEMU_STORE_TL(stop, op)                                     \
2655 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
2656                                   TCGv val,                             \
2657                                   TCGv addr)                            \
2658 {                                                                       \
2659     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
2660 }
2661 
2662 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
2663 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
2664 #endif
2665 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
2666 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
2667 
2668 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
2669 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
2670 
2671 #define GEN_QEMU_STORE_64(stop, op)                               \
2672 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
2673                                               TCGv_i64 val,       \
2674                                               TCGv addr)          \
2675 {                                                                 \
2676     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
2677 }
2678 
2679 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
2680 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
2681 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
2682 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
2683 
2684 #if defined(TARGET_PPC64)
2685 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
2686 #endif
2687 
2688 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
2689 static void glue(gen_, name##x)(DisasContext *ctx)                            \
2690 {                                                                             \
2691     TCGv EA;                                                                  \
2692     chk(ctx);                                                                 \
2693     gen_set_access_type(ctx, ACCESS_INT);                                     \
2694     EA = tcg_temp_new();                                                      \
2695     gen_addr_reg_index(ctx, EA);                                              \
2696     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
2697 }
2698 
2699 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
2700     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2701 
2702 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
2703     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2704 
2705 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
2706 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2707 {                                                                             \
2708     TCGv EA;                                                                  \
2709     CHK_SV(ctx);                                                              \
2710     gen_set_access_type(ctx, ACCESS_INT);                                     \
2711     EA = tcg_temp_new();                                                      \
2712     gen_addr_reg_index(ctx, EA);                                              \
2713     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
2714 }
2715 
2716 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
2717 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
2718 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
2719 #if defined(TARGET_PPC64)
2720 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
2721 #endif
2722 
2723 #if defined(TARGET_PPC64)
2724 /* CI load/store variants */
2725 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
2726 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
2727 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
2728 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
2729 #endif
2730 
2731 /***                              Integer store                            ***/
2732 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
2733 static void glue(gen_, name##x)(DisasContext *ctx)                            \
2734 {                                                                             \
2735     TCGv EA;                                                                  \
2736     chk(ctx);                                                                 \
2737     gen_set_access_type(ctx, ACCESS_INT);                                     \
2738     EA = tcg_temp_new();                                                      \
2739     gen_addr_reg_index(ctx, EA);                                              \
2740     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
2741 }
2742 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
2743     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2744 
2745 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
2746     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2747 
2748 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
2749 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2750 {                                                                             \
2751     TCGv EA;                                                                  \
2752     CHK_SV(ctx);                                                              \
2753     gen_set_access_type(ctx, ACCESS_INT);                                     \
2754     EA = tcg_temp_new();                                                      \
2755     gen_addr_reg_index(ctx, EA);                                              \
2756     tcg_gen_qemu_st_tl(                                                       \
2757         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
2758 }
2759 
2760 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
2761 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
2762 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
2763 #if defined(TARGET_PPC64)
2764 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
2765 #endif
2766 
2767 #if defined(TARGET_PPC64)
2768 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
2769 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
2770 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
2771 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
2772 #endif
2773 /***                Integer load and store with byte reverse               ***/
2774 
2775 /* lhbrx */
2776 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2777 
2778 /* lwbrx */
2779 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2780 
2781 #if defined(TARGET_PPC64)
2782 /* ldbrx */
2783 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
2784 /* stdbrx */
2785 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
2786 #endif  /* TARGET_PPC64 */
2787 
2788 /* sthbrx */
2789 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2790 /* stwbrx */
2791 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2792 
2793 /***                    Integer load and store multiple                    ***/
2794 
2795 /* lmw */
2796 static void gen_lmw(DisasContext *ctx)
2797 {
2798     TCGv t0;
2799     TCGv_i32 t1;
2800 
2801     if (ctx->le_mode) {
2802         gen_align_no_le(ctx);
2803         return;
2804     }
2805     gen_set_access_type(ctx, ACCESS_INT);
2806     t0 = tcg_temp_new();
2807     t1 = tcg_constant_i32(rD(ctx->opcode));
2808     gen_addr_imm_index(ctx, t0, 0);
2809     gen_helper_lmw(tcg_env, t0, t1);
2810 }
2811 
2812 /* stmw */
2813 static void gen_stmw(DisasContext *ctx)
2814 {
2815     TCGv t0;
2816     TCGv_i32 t1;
2817 
2818     if (ctx->le_mode) {
2819         gen_align_no_le(ctx);
2820         return;
2821     }
2822     gen_set_access_type(ctx, ACCESS_INT);
2823     t0 = tcg_temp_new();
2824     t1 = tcg_constant_i32(rS(ctx->opcode));
2825     gen_addr_imm_index(ctx, t0, 0);
2826     gen_helper_stmw(tcg_env, t0, t1);
2827 }
2828 
2829 /***                    Integer load and store strings                     ***/
2830 
2831 /* lswi */
2832 /*
2833  * PowerPC32 specification says we must generate an exception if rA is
2834  * in the range of registers to be loaded.  In an other hand, IBM says
2835  * this is valid, but rA won't be loaded.  For now, I'll follow the
2836  * spec...
2837  */
2838 static void gen_lswi(DisasContext *ctx)
2839 {
2840     TCGv t0;
2841     TCGv_i32 t1, t2;
2842     int nb = NB(ctx->opcode);
2843     int start = rD(ctx->opcode);
2844     int ra = rA(ctx->opcode);
2845     int nr;
2846 
2847     if (ctx->le_mode) {
2848         gen_align_no_le(ctx);
2849         return;
2850     }
2851     if (nb == 0) {
2852         nb = 32;
2853     }
2854     nr = DIV_ROUND_UP(nb, 4);
2855     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
2856         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2857         return;
2858     }
2859     gen_set_access_type(ctx, ACCESS_INT);
2860     t0 = tcg_temp_new();
2861     gen_addr_register(ctx, t0);
2862     t1 = tcg_constant_i32(nb);
2863     t2 = tcg_constant_i32(start);
2864     gen_helper_lsw(tcg_env, t0, t1, t2);
2865 }
2866 
2867 /* lswx */
2868 static void gen_lswx(DisasContext *ctx)
2869 {
2870     TCGv t0;
2871     TCGv_i32 t1, t2, t3;
2872 
2873     if (ctx->le_mode) {
2874         gen_align_no_le(ctx);
2875         return;
2876     }
2877     gen_set_access_type(ctx, ACCESS_INT);
2878     t0 = tcg_temp_new();
2879     gen_addr_reg_index(ctx, t0);
2880     t1 = tcg_constant_i32(rD(ctx->opcode));
2881     t2 = tcg_constant_i32(rA(ctx->opcode));
2882     t3 = tcg_constant_i32(rB(ctx->opcode));
2883     gen_helper_lswx(tcg_env, t0, t1, t2, t3);
2884 }
2885 
2886 /* stswi */
2887 static void gen_stswi(DisasContext *ctx)
2888 {
2889     TCGv t0;
2890     TCGv_i32 t1, t2;
2891     int nb = NB(ctx->opcode);
2892 
2893     if (ctx->le_mode) {
2894         gen_align_no_le(ctx);
2895         return;
2896     }
2897     gen_set_access_type(ctx, ACCESS_INT);
2898     t0 = tcg_temp_new();
2899     gen_addr_register(ctx, t0);
2900     if (nb == 0) {
2901         nb = 32;
2902     }
2903     t1 = tcg_constant_i32(nb);
2904     t2 = tcg_constant_i32(rS(ctx->opcode));
2905     gen_helper_stsw(tcg_env, t0, t1, t2);
2906 }
2907 
2908 /* stswx */
2909 static void gen_stswx(DisasContext *ctx)
2910 {
2911     TCGv t0;
2912     TCGv_i32 t1, t2;
2913 
2914     if (ctx->le_mode) {
2915         gen_align_no_le(ctx);
2916         return;
2917     }
2918     gen_set_access_type(ctx, ACCESS_INT);
2919     t0 = tcg_temp_new();
2920     gen_addr_reg_index(ctx, t0);
2921     t1 = tcg_temp_new_i32();
2922     tcg_gen_trunc_tl_i32(t1, cpu_xer);
2923     tcg_gen_andi_i32(t1, t1, 0x7F);
2924     t2 = tcg_constant_i32(rS(ctx->opcode));
2925     gen_helper_stsw(tcg_env, t0, t1, t2);
2926 }
2927 
2928 #if !defined(CONFIG_USER_ONLY)
2929 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
2930 {
2931     TCGv_i32 t;
2932     TCGLabel *l;
2933 
2934     if (!ctx->lazy_tlb_flush) {
2935         return;
2936     }
2937     l = gen_new_label();
2938     t = tcg_temp_new_i32();
2939     tcg_gen_ld_i32(t, tcg_env, offsetof(CPUPPCState, tlb_need_flush));
2940     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
2941     if (global) {
2942         gen_helper_check_tlb_flush_global(tcg_env);
2943     } else {
2944         gen_helper_check_tlb_flush_local(tcg_env);
2945     }
2946     gen_set_label(l);
2947     if (global) {
2948         /*
2949          * Global TLB flush uses async-work which must run before the
2950          * next instruction, so this must be the last in the TB.
2951          */
2952         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2953     }
2954 }
2955 #else
2956 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
2957 #endif
2958 
2959 /* isync */
2960 static void gen_isync(DisasContext *ctx)
2961 {
2962     /*
2963      * We need to check for a pending TLB flush. This can only happen in
2964      * kernel mode however so check MSR_PR
2965      */
2966     if (!ctx->pr) {
2967         gen_check_tlb_flush(ctx, false);
2968     }
2969     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2970     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2971 }
2972 
2973 static void gen_load_locked(DisasContext *ctx, MemOp memop)
2974 {
2975     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
2976     TCGv t0 = tcg_temp_new();
2977 
2978     gen_set_access_type(ctx, ACCESS_RES);
2979     gen_addr_reg_index(ctx, t0);
2980     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, DEF_MEMOP(memop) | MO_ALIGN);
2981     tcg_gen_mov_tl(cpu_reserve, t0);
2982     tcg_gen_movi_tl(cpu_reserve_length, memop_size(memop));
2983     tcg_gen_mov_tl(cpu_reserve_val, gpr);
2984 }
2985 
2986 #define LARX(name, memop)                  \
2987 static void gen_##name(DisasContext *ctx)  \
2988 {                                          \
2989     gen_load_locked(ctx, memop);           \
2990 }
2991 
2992 /* lwarx */
2993 LARX(lbarx, MO_UB)
2994 LARX(lharx, MO_UW)
2995 LARX(lwarx, MO_UL)
2996 
2997 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
2998                                       TCGv EA, TCGCond cond, int addend)
2999 {
3000     TCGv t = tcg_temp_new();
3001     TCGv t2 = tcg_temp_new();
3002     TCGv u = tcg_temp_new();
3003 
3004     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3005     tcg_gen_addi_tl(t2, EA, memop_size(memop));
3006     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3007     tcg_gen_addi_tl(u, t, addend);
3008 
3009     /* E.g. for fetch and increment bounded... */
3010     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3011     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3012     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3013 
3014     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3015     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t,
3016                        tcg_constant_tl(1 << (memop_size(memop) * 8 - 1)));
3017 }
3018 
3019 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
3020 {
3021     uint32_t gpr_FC = FC(ctx->opcode);
3022     TCGv EA = tcg_temp_new();
3023     int rt = rD(ctx->opcode);
3024     bool need_serial;
3025     TCGv src, dst;
3026 
3027     gen_addr_register(ctx, EA);
3028     dst = cpu_gpr[rt];
3029     src = cpu_gpr[(rt + 1) & 31];
3030 
3031     need_serial = false;
3032     memop |= MO_ALIGN;
3033     switch (gpr_FC) {
3034     case 0: /* Fetch and add */
3035         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3036         break;
3037     case 1: /* Fetch and xor */
3038         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3039         break;
3040     case 2: /* Fetch and or */
3041         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3042         break;
3043     case 3: /* Fetch and 'and' */
3044         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3045         break;
3046     case 4:  /* Fetch and max unsigned */
3047         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3048         break;
3049     case 5:  /* Fetch and max signed */
3050         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3051         break;
3052     case 6:  /* Fetch and min unsigned */
3053         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3054         break;
3055     case 7:  /* Fetch and min signed */
3056         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3057         break;
3058     case 8: /* Swap */
3059         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3060         break;
3061 
3062     case 16: /* Compare and swap not equal */
3063         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3064             need_serial = true;
3065         } else {
3066             TCGv t0 = tcg_temp_new();
3067             TCGv t1 = tcg_temp_new();
3068 
3069             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3070             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3071                 tcg_gen_mov_tl(t1, src);
3072             } else {
3073                 tcg_gen_ext32u_tl(t1, src);
3074             }
3075             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3076                                cpu_gpr[(rt + 2) & 31], t0);
3077             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3078             tcg_gen_mov_tl(dst, t0);
3079         }
3080         break;
3081 
3082     case 24: /* Fetch and increment bounded */
3083         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3084             need_serial = true;
3085         } else {
3086             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3087         }
3088         break;
3089     case 25: /* Fetch and increment equal */
3090         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3091             need_serial = true;
3092         } else {
3093             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3094         }
3095         break;
3096     case 28: /* Fetch and decrement bounded */
3097         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3098             need_serial = true;
3099         } else {
3100             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3101         }
3102         break;
3103 
3104     default:
3105         /* invoke data storage error handler */
3106         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3107     }
3108 
3109     if (need_serial) {
3110         /* Restart with exclusive lock.  */
3111         gen_helper_exit_atomic(tcg_env);
3112         ctx->base.is_jmp = DISAS_NORETURN;
3113     }
3114 }
3115 
3116 static void gen_lwat(DisasContext *ctx)
3117 {
3118     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3119 }
3120 
3121 #ifdef TARGET_PPC64
3122 static void gen_ldat(DisasContext *ctx)
3123 {
3124     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3125 }
3126 #endif
3127 
3128 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3129 {
3130     uint32_t gpr_FC = FC(ctx->opcode);
3131     TCGv EA = tcg_temp_new();
3132     TCGv src, discard;
3133 
3134     gen_addr_register(ctx, EA);
3135     src = cpu_gpr[rD(ctx->opcode)];
3136     discard = tcg_temp_new();
3137 
3138     memop |= MO_ALIGN;
3139     switch (gpr_FC) {
3140     case 0: /* add and Store */
3141         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3142         break;
3143     case 1: /* xor and Store */
3144         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3145         break;
3146     case 2: /* Or and Store */
3147         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3148         break;
3149     case 3: /* 'and' and Store */
3150         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3151         break;
3152     case 4:  /* Store max unsigned */
3153         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3154         break;
3155     case 5:  /* Store max signed */
3156         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3157         break;
3158     case 6:  /* Store min unsigned */
3159         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3160         break;
3161     case 7:  /* Store min signed */
3162         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3163         break;
3164     case 24: /* Store twin  */
3165         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3166             /* Restart with exclusive lock.  */
3167             gen_helper_exit_atomic(tcg_env);
3168             ctx->base.is_jmp = DISAS_NORETURN;
3169         } else {
3170             TCGv t = tcg_temp_new();
3171             TCGv t2 = tcg_temp_new();
3172             TCGv s = tcg_temp_new();
3173             TCGv s2 = tcg_temp_new();
3174             TCGv ea_plus_s = tcg_temp_new();
3175 
3176             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3177             tcg_gen_addi_tl(ea_plus_s, EA, memop_size(memop));
3178             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3179             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3180             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3181             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3182             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3183         }
3184         break;
3185     default:
3186         /* invoke data storage error handler */
3187         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3188     }
3189 }
3190 
3191 static void gen_stwat(DisasContext *ctx)
3192 {
3193     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3194 }
3195 
3196 #ifdef TARGET_PPC64
3197 static void gen_stdat(DisasContext *ctx)
3198 {
3199     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3200 }
3201 #endif
3202 
3203 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3204 {
3205     TCGLabel *lfail;
3206     TCGv EA;
3207     TCGv cr0;
3208     TCGv t0;
3209     int rs = rS(ctx->opcode);
3210 
3211     lfail = gen_new_label();
3212     EA = tcg_temp_new();
3213     cr0 = tcg_temp_new();
3214     t0 = tcg_temp_new();
3215 
3216     tcg_gen_mov_tl(cr0, cpu_so);
3217     gen_set_access_type(ctx, ACCESS_RES);
3218     gen_addr_reg_index(ctx, EA);
3219     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3220     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, memop_size(memop), lfail);
3221 
3222     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3223                               cpu_gpr[rs], ctx->mem_idx,
3224                               DEF_MEMOP(memop) | MO_ALIGN);
3225     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3226     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3227     tcg_gen_or_tl(cr0, cr0, t0);
3228 
3229     gen_set_label(lfail);
3230     tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3231     tcg_gen_movi_tl(cpu_reserve, -1);
3232 }
3233 
3234 #define STCX(name, memop)                  \
3235 static void gen_##name(DisasContext *ctx)  \
3236 {                                          \
3237     gen_conditional_store(ctx, memop);     \
3238 }
3239 
3240 STCX(stbcx_, MO_UB)
3241 STCX(sthcx_, MO_UW)
3242 STCX(stwcx_, MO_UL)
3243 
3244 #if defined(TARGET_PPC64)
3245 /* ldarx */
3246 LARX(ldarx, MO_UQ)
3247 /* stdcx. */
3248 STCX(stdcx_, MO_UQ)
3249 
3250 /* lqarx */
3251 static void gen_lqarx(DisasContext *ctx)
3252 {
3253     int rd = rD(ctx->opcode);
3254     TCGv EA, hi, lo;
3255     TCGv_i128 t16;
3256 
3257     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3258                  (rd == rB(ctx->opcode)))) {
3259         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3260         return;
3261     }
3262 
3263     gen_set_access_type(ctx, ACCESS_RES);
3264     EA = tcg_temp_new();
3265     gen_addr_reg_index(ctx, EA);
3266 
3267     /* Note that the low part is always in RD+1, even in LE mode.  */
3268     lo = cpu_gpr[rd + 1];
3269     hi = cpu_gpr[rd];
3270 
3271     t16 = tcg_temp_new_i128();
3272     tcg_gen_qemu_ld_i128(t16, EA, ctx->mem_idx, DEF_MEMOP(MO_128 | MO_ALIGN));
3273     tcg_gen_extr_i128_i64(lo, hi, t16);
3274 
3275     tcg_gen_mov_tl(cpu_reserve, EA);
3276     tcg_gen_movi_tl(cpu_reserve_length, 16);
3277     tcg_gen_st_tl(hi, tcg_env, offsetof(CPUPPCState, reserve_val));
3278     tcg_gen_st_tl(lo, tcg_env, offsetof(CPUPPCState, reserve_val2));
3279 }
3280 
3281 /* stqcx. */
3282 static void gen_stqcx_(DisasContext *ctx)
3283 {
3284     TCGLabel *lfail;
3285     TCGv EA, t0, t1;
3286     TCGv cr0;
3287     TCGv_i128 cmp, val;
3288     int rs = rS(ctx->opcode);
3289 
3290     if (unlikely(rs & 1)) {
3291         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3292         return;
3293     }
3294 
3295     lfail = gen_new_label();
3296     EA = tcg_temp_new();
3297     cr0 = tcg_temp_new();
3298 
3299     tcg_gen_mov_tl(cr0, cpu_so);
3300     gen_set_access_type(ctx, ACCESS_RES);
3301     gen_addr_reg_index(ctx, EA);
3302     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3303     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, 16, lfail);
3304 
3305     cmp = tcg_temp_new_i128();
3306     val = tcg_temp_new_i128();
3307 
3308     tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
3309 
3310     /* Note that the low part is always in RS+1, even in LE mode.  */
3311     tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
3312 
3313     tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
3314                                 DEF_MEMOP(MO_128 | MO_ALIGN));
3315 
3316     t0 = tcg_temp_new();
3317     t1 = tcg_temp_new();
3318     tcg_gen_extr_i128_i64(t1, t0, val);
3319 
3320     tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
3321     tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
3322     tcg_gen_or_tl(t0, t0, t1);
3323 
3324     tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
3325     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3326     tcg_gen_or_tl(cr0, cr0, t0);
3327 
3328     gen_set_label(lfail);
3329     tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3330     tcg_gen_movi_tl(cpu_reserve, -1);
3331 }
3332 #endif /* defined(TARGET_PPC64) */
3333 
3334 /* wait */
3335 static void gen_wait(DisasContext *ctx)
3336 {
3337     uint32_t wc;
3338 
3339     if (ctx->insns_flags & PPC_WAIT) {
3340         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
3341 
3342         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
3343             /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
3344             wc = WC(ctx->opcode);
3345         } else {
3346             wc = 0;
3347         }
3348 
3349     } else if (ctx->insns_flags2 & PPC2_ISA300) {
3350         /* v3.0 defines a new 'wait' encoding. */
3351         wc = WC(ctx->opcode);
3352         if (ctx->insns_flags2 & PPC2_ISA310) {
3353             uint32_t pl = PL(ctx->opcode);
3354 
3355             /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
3356             if (wc == 3) {
3357                 gen_invalid(ctx);
3358                 return;
3359             }
3360 
3361             /* PL 1-3 are reserved. If WC=2 then the insn is treated as noop. */
3362             if (pl > 0 && wc != 2) {
3363                 gen_invalid(ctx);
3364                 return;
3365             }
3366 
3367         } else { /* ISA300 */
3368             /* WC 1-3 are reserved */
3369             if (wc > 0) {
3370                 gen_invalid(ctx);
3371                 return;
3372             }
3373         }
3374 
3375     } else {
3376         warn_report("wait instruction decoded with wrong ISA flags.");
3377         gen_invalid(ctx);
3378         return;
3379     }
3380 
3381     /*
3382      * wait without WC field or with WC=0 waits for an exception / interrupt
3383      * to occur.
3384      */
3385     if (wc == 0) {
3386         TCGv_i32 t0 = tcg_constant_i32(1);
3387         tcg_gen_st_i32(t0, tcg_env,
3388                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3389         /* Stop translation, as the CPU is supposed to sleep from now */
3390         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3391     }
3392 
3393     /*
3394      * Other wait types must not just wait until an exception occurs because
3395      * ignoring their other wake-up conditions could cause a hang.
3396      *
3397      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
3398      * no-ops.
3399      *
3400      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
3401      *
3402      * wc=2 waits for an implementation-specific condition, such could be
3403      * always true, so it can be implemented as a no-op.
3404      *
3405      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
3406      *
3407      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
3408      * Reservation-loss may have implementation-specific conditions, so it
3409      * can be implemented as a no-op.
3410      *
3411      * wc=2 waits for an exception or an amount of time to pass. This
3412      * amount is implementation-specific so it can be implemented as a
3413      * no-op.
3414      *
3415      * ISA v3.1 allows for execution to resume "in the rare case of
3416      * an implementation-dependent event", so in any case software must
3417      * not depend on the architected resumption condition to become
3418      * true, so no-op implementations should be architecturally correct
3419      * (if suboptimal).
3420      */
3421 }
3422 
3423 #if defined(TARGET_PPC64)
3424 static void gen_doze(DisasContext *ctx)
3425 {
3426 #if defined(CONFIG_USER_ONLY)
3427     GEN_PRIV(ctx);
3428 #else
3429     TCGv_i32 t;
3430 
3431     CHK_HV(ctx);
3432     translator_io_start(&ctx->base);
3433     t = tcg_constant_i32(PPC_PM_DOZE);
3434     gen_helper_pminsn(tcg_env, t);
3435     /* Stop translation, as the CPU is supposed to sleep from now */
3436     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3437 #endif /* defined(CONFIG_USER_ONLY) */
3438 }
3439 
3440 static void gen_nap(DisasContext *ctx)
3441 {
3442 #if defined(CONFIG_USER_ONLY)
3443     GEN_PRIV(ctx);
3444 #else
3445     TCGv_i32 t;
3446 
3447     CHK_HV(ctx);
3448     translator_io_start(&ctx->base);
3449     t = tcg_constant_i32(PPC_PM_NAP);
3450     gen_helper_pminsn(tcg_env, t);
3451     /* Stop translation, as the CPU is supposed to sleep from now */
3452     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3453 #endif /* defined(CONFIG_USER_ONLY) */
3454 }
3455 
3456 static void gen_stop(DisasContext *ctx)
3457 {
3458 #if defined(CONFIG_USER_ONLY)
3459     GEN_PRIV(ctx);
3460 #else
3461     TCGv_i32 t;
3462 
3463     CHK_HV(ctx);
3464     translator_io_start(&ctx->base);
3465     t = tcg_constant_i32(PPC_PM_STOP);
3466     gen_helper_pminsn(tcg_env, t);
3467     /* Stop translation, as the CPU is supposed to sleep from now */
3468     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3469 #endif /* defined(CONFIG_USER_ONLY) */
3470 }
3471 
3472 static void gen_sleep(DisasContext *ctx)
3473 {
3474 #if defined(CONFIG_USER_ONLY)
3475     GEN_PRIV(ctx);
3476 #else
3477     TCGv_i32 t;
3478 
3479     CHK_HV(ctx);
3480     translator_io_start(&ctx->base);
3481     t = tcg_constant_i32(PPC_PM_SLEEP);
3482     gen_helper_pminsn(tcg_env, t);
3483     /* Stop translation, as the CPU is supposed to sleep from now */
3484     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3485 #endif /* defined(CONFIG_USER_ONLY) */
3486 }
3487 
3488 static void gen_rvwinkle(DisasContext *ctx)
3489 {
3490 #if defined(CONFIG_USER_ONLY)
3491     GEN_PRIV(ctx);
3492 #else
3493     TCGv_i32 t;
3494 
3495     CHK_HV(ctx);
3496     translator_io_start(&ctx->base);
3497     t = tcg_constant_i32(PPC_PM_RVWINKLE);
3498     gen_helper_pminsn(tcg_env, t);
3499     /* Stop translation, as the CPU is supposed to sleep from now */
3500     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3501 #endif /* defined(CONFIG_USER_ONLY) */
3502 }
3503 
3504 static inline TCGv gen_write_bhrb(TCGv_ptr base, TCGv offset, TCGv mask, TCGv value)
3505 {
3506     TCGv_ptr tmp = tcg_temp_new_ptr();
3507 
3508     /* add base and offset to get address of bhrb entry */
3509     tcg_gen_add_ptr(tmp, base, (TCGv_ptr)offset);
3510 
3511     /* store value into bhrb at bhrb_offset */
3512     tcg_gen_st_i64(value, tmp, 0);
3513 
3514     /* add 8 to current bhrb_offset */
3515     tcg_gen_addi_tl(offset, offset, 8);
3516 
3517     /* apply offset mask */
3518     tcg_gen_and_tl(offset, offset, mask);
3519 
3520     return offset;
3521 }
3522 #endif /* #if defined(TARGET_PPC64) */
3523 
3524 static inline void gen_update_branch_history(DisasContext *ctx,
3525                                              target_ulong nip,
3526                                              TCGv target,
3527                                              target_long inst_type)
3528 {
3529 #if defined(TARGET_PPC64)
3530     TCGv_ptr base;
3531     TCGv tmp;
3532     TCGv offset;
3533     TCGv mask;
3534     TCGLabel *no_update;
3535 
3536     if (ctx->has_cfar) {
3537         tcg_gen_movi_tl(cpu_cfar, nip);
3538     }
3539 
3540     if (!ctx->has_bhrb ||
3541         !ctx->bhrb_enable ||
3542         inst_type == BHRB_TYPE_NORECORD) {
3543         return;
3544     }
3545 
3546     tmp = tcg_temp_new();
3547     no_update = gen_new_label();
3548 
3549     /* check for bhrb filtering */
3550     tcg_gen_ld_tl(tmp, tcg_env, offsetof(CPUPPCState, bhrb_filter));
3551     tcg_gen_andi_tl(tmp, tmp, inst_type);
3552     tcg_gen_brcondi_tl(TCG_COND_EQ, tmp, 0, no_update);
3553 
3554     base = tcg_temp_new_ptr();
3555     offset = tcg_temp_new();
3556     mask = tcg_temp_new();
3557 
3558     /* load bhrb base address */
3559     tcg_gen_ld_ptr(base, tcg_env, offsetof(CPUPPCState, bhrb_base));
3560 
3561     /* load current bhrb_offset */
3562     tcg_gen_ld_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3563 
3564     /* load a BHRB offset mask */
3565     tcg_gen_ld_tl(mask, tcg_env, offsetof(CPUPPCState, bhrb_offset_mask));
3566 
3567     offset = gen_write_bhrb(base, offset, mask, tcg_constant_i64(nip));
3568 
3569     /* Also record the target address for XL-Form branches */
3570     if (inst_type & BHRB_TYPE_XL_FORM) {
3571 
3572         /* Set the 'T' bit for target entries */
3573         tcg_gen_ori_tl(tmp, target, 0x2);
3574 
3575         offset = gen_write_bhrb(base, offset, mask, tmp);
3576     }
3577 
3578     /* save updated bhrb_offset for next time */
3579     tcg_gen_st_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3580 
3581     gen_set_label(no_update);
3582 #endif
3583 }
3584 
3585 #if defined(TARGET_PPC64)
3586 static void pmu_count_insns(DisasContext *ctx)
3587 {
3588     /*
3589      * Do not bother calling the helper if the PMU isn't counting
3590      * instructions.
3591      */
3592     if (!ctx->pmu_insn_cnt) {
3593         return;
3594     }
3595 
3596  #if !defined(CONFIG_USER_ONLY)
3597     TCGLabel *l;
3598     TCGv t0;
3599 
3600     /*
3601      * The PMU insns_inc() helper stops the internal PMU timer if a
3602      * counter overflows happens. In that case, if the guest is
3603      * running with icount and we do not handle it beforehand,
3604      * the helper can trigger a 'bad icount read'.
3605      */
3606     translator_io_start(&ctx->base);
3607 
3608     /* Avoid helper calls when only PMC5-6 are enabled. */
3609     if (!ctx->pmc_other) {
3610         l = gen_new_label();
3611         t0 = tcg_temp_new();
3612 
3613         gen_load_spr(t0, SPR_POWER_PMC5);
3614         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3615         gen_store_spr(SPR_POWER_PMC5, t0);
3616         /* Check for overflow, if it's enabled */
3617         if (ctx->mmcr0_pmcjce) {
3618             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
3619             gen_helper_handle_pmc5_overflow(tcg_env);
3620         }
3621 
3622         gen_set_label(l);
3623     } else {
3624         gen_helper_insns_inc(tcg_env, tcg_constant_i32(ctx->base.num_insns));
3625     }
3626   #else
3627     /*
3628      * User mode can read (but not write) PMC5 and start/stop
3629      * the PMU via MMCR0_FC. In this case just increment
3630      * PMC5 with base.num_insns.
3631      */
3632     TCGv t0 = tcg_temp_new();
3633 
3634     gen_load_spr(t0, SPR_POWER_PMC5);
3635     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3636     gen_store_spr(SPR_POWER_PMC5, t0);
3637   #endif /* #if !defined(CONFIG_USER_ONLY) */
3638 }
3639 #else
3640 static void pmu_count_insns(DisasContext *ctx)
3641 {
3642 }
3643 #endif /* #if defined(TARGET_PPC64) */
3644 
3645 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
3646 {
3647     if (unlikely(ctx->singlestep_enabled)) {
3648         return false;
3649     }
3650     return translator_use_goto_tb(&ctx->base, dest);
3651 }
3652 
3653 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
3654 {
3655     if (unlikely(ctx->singlestep_enabled)) {
3656         gen_debug_exception(ctx, false);
3657     } else {
3658         /*
3659          * tcg_gen_lookup_and_goto_ptr will exit the TB if
3660          * CF_NO_GOTO_PTR is set. Count insns now.
3661          */
3662         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
3663             pmu_count_insns(ctx);
3664         }
3665 
3666         tcg_gen_lookup_and_goto_ptr();
3667     }
3668 }
3669 
3670 /***                                Branch                                 ***/
3671 static void gen_goto_tb(DisasContext *ctx, unsigned tb_slot_idx,
3672                         target_ulong dest)
3673 {
3674     if (NARROW_MODE(ctx)) {
3675         dest = (uint32_t) dest;
3676     }
3677     if (use_goto_tb(ctx, dest)) {
3678         pmu_count_insns(ctx);
3679         tcg_gen_goto_tb(tb_slot_idx);
3680         tcg_gen_movi_tl(cpu_nip, dest & ~3);
3681         tcg_gen_exit_tb(ctx->base.tb, tb_slot_idx);
3682     } else {
3683         tcg_gen_movi_tl(cpu_nip, dest & ~3);
3684         gen_lookup_and_goto_ptr(ctx);
3685     }
3686 }
3687 
3688 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3689 {
3690     if (NARROW_MODE(ctx)) {
3691         nip = (uint32_t)nip;
3692     }
3693     tcg_gen_movi_tl(cpu_lr, nip);
3694 }
3695 
3696 /* b ba bl bla */
3697 static void gen_b(DisasContext *ctx)
3698 {
3699     target_ulong li, target;
3700 
3701     /* sign extend LI */
3702     li = LI(ctx->opcode);
3703     li = (li ^ 0x02000000) - 0x02000000;
3704     if (likely(AA(ctx->opcode) == 0)) {
3705         target = ctx->cia + li;
3706     } else {
3707         target = li;
3708     }
3709     if (LK(ctx->opcode)) {
3710         gen_setlr(ctx, ctx->base.pc_next);
3711         gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_CALL);
3712     } else {
3713         gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_OTHER);
3714     }
3715     gen_goto_tb(ctx, 0, target);
3716     ctx->base.is_jmp = DISAS_NORETURN;
3717 }
3718 
3719 #define BCOND_IM  0
3720 #define BCOND_LR  1
3721 #define BCOND_CTR 2
3722 #define BCOND_TAR 3
3723 
3724 static void gen_bcond(DisasContext *ctx, int type)
3725 {
3726     uint32_t bo = BO(ctx->opcode);
3727     TCGLabel *l1;
3728     TCGv target;
3729     target_long bhrb_type = BHRB_TYPE_OTHER;
3730 
3731     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3732         target = tcg_temp_new();
3733         if (type == BCOND_CTR) {
3734             tcg_gen_mov_tl(target, cpu_ctr);
3735         } else if (type == BCOND_TAR) {
3736             gen_load_spr(target, SPR_TAR);
3737         } else {
3738             tcg_gen_mov_tl(target, cpu_lr);
3739         }
3740         if (!LK(ctx->opcode)) {
3741             bhrb_type |= BHRB_TYPE_INDIRECT;
3742         }
3743         bhrb_type |= BHRB_TYPE_XL_FORM;
3744     } else {
3745         target = NULL;
3746     }
3747     if (LK(ctx->opcode)) {
3748         gen_setlr(ctx, ctx->base.pc_next);
3749         bhrb_type |= BHRB_TYPE_CALL;
3750     }
3751     l1 = gen_new_label();
3752     if ((bo & 0x4) == 0) {
3753         /* Decrement and test CTR */
3754         TCGv temp = tcg_temp_new();
3755 
3756         if (type == BCOND_CTR) {
3757             /*
3758              * All ISAs up to v3 describe this form of bcctr as invalid but
3759              * some processors, ie. 64-bit server processors compliant with
3760              * arch 2.x, do implement a "test and decrement" logic instead,
3761              * as described in their respective UMs. This logic involves CTR
3762              * to act as both the branch target and a counter, which makes
3763              * it basically useless and thus never used in real code.
3764              *
3765              * This form was hence chosen to trigger extra micro-architectural
3766              * side-effect on real HW needed for the Spectre v2 workaround.
3767              * It is up to guests that implement such workaround, ie. linux, to
3768              * use this form in a way it just triggers the side-effect without
3769              * doing anything else harmful.
3770              */
3771             if (unlikely(!is_book3s_arch2x(ctx))) {
3772                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3773                 return;
3774             }
3775 
3776             if (NARROW_MODE(ctx)) {
3777                 tcg_gen_ext32u_tl(temp, cpu_ctr);
3778             } else {
3779                 tcg_gen_mov_tl(temp, cpu_ctr);
3780             }
3781             if (bo & 0x2) {
3782                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3783             } else {
3784                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3785             }
3786             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3787         } else {
3788             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3789             if (NARROW_MODE(ctx)) {
3790                 tcg_gen_ext32u_tl(temp, cpu_ctr);
3791             } else {
3792                 tcg_gen_mov_tl(temp, cpu_ctr);
3793             }
3794             if (bo & 0x2) {
3795                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3796             } else {
3797                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3798             }
3799         }
3800         bhrb_type |= BHRB_TYPE_COND;
3801     }
3802     if ((bo & 0x10) == 0) {
3803         /* Test CR */
3804         uint32_t bi = BI(ctx->opcode);
3805         uint32_t mask = 0x08 >> (bi & 0x03);
3806         TCGv_i32 temp = tcg_temp_new_i32();
3807 
3808         if (bo & 0x8) {
3809             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3810             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3811         } else {
3812             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3813             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3814         }
3815         bhrb_type |= BHRB_TYPE_COND;
3816     }
3817 
3818     gen_update_branch_history(ctx, ctx->cia, target, bhrb_type);
3819 
3820     if (type == BCOND_IM) {
3821         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3822         if (likely(AA(ctx->opcode) == 0)) {
3823             gen_goto_tb(ctx, 0, ctx->cia + li);
3824         } else {
3825             gen_goto_tb(ctx, 0, li);
3826         }
3827     } else {
3828         if (NARROW_MODE(ctx)) {
3829             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3830         } else {
3831             tcg_gen_andi_tl(cpu_nip, target, ~3);
3832         }
3833         gen_lookup_and_goto_ptr(ctx);
3834     }
3835     if ((bo & 0x14) != 0x14) {
3836         /* fallthrough case */
3837         gen_set_label(l1);
3838         gen_goto_tb(ctx, 1, ctx->base.pc_next);
3839     }
3840     ctx->base.is_jmp = DISAS_NORETURN;
3841 }
3842 
3843 static void gen_bc(DisasContext *ctx)
3844 {
3845     gen_bcond(ctx, BCOND_IM);
3846 }
3847 
3848 static void gen_bcctr(DisasContext *ctx)
3849 {
3850     gen_bcond(ctx, BCOND_CTR);
3851 }
3852 
3853 static void gen_bclr(DisasContext *ctx)
3854 {
3855     gen_bcond(ctx, BCOND_LR);
3856 }
3857 
3858 static void gen_bctar(DisasContext *ctx)
3859 {
3860     gen_bcond(ctx, BCOND_TAR);
3861 }
3862 
3863 /***                      Condition register logical                       ***/
3864 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
3865 static void glue(gen_, name)(DisasContext *ctx)                               \
3866 {                                                                             \
3867     uint8_t bitmask;                                                          \
3868     int sh;                                                                   \
3869     TCGv_i32 t0, t1;                                                          \
3870     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
3871     t0 = tcg_temp_new_i32();                                                  \
3872     if (sh > 0)                                                               \
3873         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
3874     else if (sh < 0)                                                          \
3875         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
3876     else                                                                      \
3877         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
3878     t1 = tcg_temp_new_i32();                                                  \
3879     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
3880     if (sh > 0)                                                               \
3881         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
3882     else if (sh < 0)                                                          \
3883         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
3884     else                                                                      \
3885         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
3886     tcg_op(t0, t0, t1);                                                       \
3887     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
3888     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
3889     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
3890     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
3891 }
3892 
3893 /* crand */
3894 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3895 /* crandc */
3896 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3897 /* creqv */
3898 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3899 /* crnand */
3900 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3901 /* crnor */
3902 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3903 /* cror */
3904 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3905 /* crorc */
3906 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3907 /* crxor */
3908 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3909 
3910 /* mcrf */
3911 static void gen_mcrf(DisasContext *ctx)
3912 {
3913     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3914 }
3915 
3916 /***                           System linkage                              ***/
3917 
3918 /* rfi (supervisor only) */
3919 static void gen_rfi(DisasContext *ctx)
3920 {
3921 #if defined(CONFIG_USER_ONLY)
3922     GEN_PRIV(ctx);
3923 #else
3924     /*
3925      * This instruction doesn't exist anymore on 64-bit server
3926      * processors compliant with arch 2.x
3927      */
3928     if (is_book3s_arch2x(ctx)) {
3929         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3930         return;
3931     }
3932     /* Restore CPU state */
3933     CHK_SV(ctx);
3934     translator_io_start(&ctx->base);
3935     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3936     gen_helper_rfi(tcg_env);
3937     ctx->base.is_jmp = DISAS_EXIT;
3938 #endif
3939 }
3940 
3941 #if defined(TARGET_PPC64)
3942 static void gen_rfid(DisasContext *ctx)
3943 {
3944 #if defined(CONFIG_USER_ONLY)
3945     GEN_PRIV(ctx);
3946 #else
3947     /* Restore CPU state */
3948     CHK_SV(ctx);
3949     translator_io_start(&ctx->base);
3950     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3951     gen_helper_rfid(tcg_env);
3952     ctx->base.is_jmp = DISAS_EXIT;
3953 #endif
3954 }
3955 
3956 #if !defined(CONFIG_USER_ONLY)
3957 static void gen_rfscv(DisasContext *ctx)
3958 {
3959 #if defined(CONFIG_USER_ONLY)
3960     GEN_PRIV(ctx);
3961 #else
3962     /* Restore CPU state */
3963     CHK_SV(ctx);
3964     translator_io_start(&ctx->base);
3965     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3966     gen_helper_rfscv(tcg_env);
3967     ctx->base.is_jmp = DISAS_EXIT;
3968 #endif
3969 }
3970 #endif
3971 
3972 static void gen_hrfid(DisasContext *ctx)
3973 {
3974 #if defined(CONFIG_USER_ONLY)
3975     GEN_PRIV(ctx);
3976 #else
3977     /* Restore CPU state */
3978     CHK_HV(ctx);
3979     translator_io_start(&ctx->base);
3980     gen_helper_hrfid(tcg_env);
3981     ctx->base.is_jmp = DISAS_EXIT;
3982 #endif
3983 }
3984 #endif
3985 
3986 /* sc */
3987 #if defined(CONFIG_USER_ONLY)
3988 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3989 #else
3990 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3991 #endif
3992 static void gen_sc(DisasContext *ctx)
3993 {
3994     uint32_t lev;
3995 
3996     /*
3997      * LEV is a 7-bit field, but the top 6 bits are treated as a reserved
3998      * field (i.e., ignored). ISA v3.1 changes that to 5 bits, but that is
3999      * for Ultravisor which TCG does not support, so just ignore the top 6.
4000      */
4001     lev = (ctx->opcode >> 5) & 0x1;
4002     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4003 }
4004 
4005 #if defined(TARGET_PPC64)
4006 #if !defined(CONFIG_USER_ONLY)
4007 static void gen_scv(DisasContext *ctx)
4008 {
4009     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
4010 
4011     /* Set the PC back to the faulting instruction. */
4012     gen_update_nip(ctx, ctx->cia);
4013     gen_helper_scv(tcg_env, tcg_constant_i32(lev));
4014 
4015     ctx->base.is_jmp = DISAS_NORETURN;
4016 }
4017 #endif
4018 #endif
4019 
4020 /***                                Trap                                   ***/
4021 
4022 /* Check for unconditional traps (always or never) */
4023 static bool check_unconditional_trap(DisasContext *ctx, int to)
4024 {
4025     /* Trap never */
4026     if (to == 0) {
4027         return true;
4028     }
4029     /* Trap always */
4030     if (to == 31) {
4031         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4032         return true;
4033     }
4034     return false;
4035 }
4036 
4037 /***                          Processor control                            ***/
4038 
4039 /* mcrxr */
4040 static void gen_mcrxr(DisasContext *ctx)
4041 {
4042     TCGv_i32 t0 = tcg_temp_new_i32();
4043     TCGv_i32 t1 = tcg_temp_new_i32();
4044     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4045 
4046     tcg_gen_trunc_tl_i32(t0, cpu_so);
4047     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4048     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4049     tcg_gen_shli_i32(t0, t0, 3);
4050     tcg_gen_shli_i32(t1, t1, 2);
4051     tcg_gen_shli_i32(dst, dst, 1);
4052     tcg_gen_or_i32(dst, dst, t0);
4053     tcg_gen_or_i32(dst, dst, t1);
4054 
4055     tcg_gen_movi_tl(cpu_so, 0);
4056     tcg_gen_movi_tl(cpu_ov, 0);
4057     tcg_gen_movi_tl(cpu_ca, 0);
4058 }
4059 
4060 #ifdef TARGET_PPC64
4061 /* mcrxrx */
4062 static void gen_mcrxrx(DisasContext *ctx)
4063 {
4064     TCGv t0 = tcg_temp_new();
4065     TCGv t1 = tcg_temp_new();
4066     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4067 
4068     /* copy OV and OV32 */
4069     tcg_gen_shli_tl(t0, cpu_ov, 1);
4070     tcg_gen_or_tl(t0, t0, cpu_ov32);
4071     tcg_gen_shli_tl(t0, t0, 2);
4072     /* copy CA and CA32 */
4073     tcg_gen_shli_tl(t1, cpu_ca, 1);
4074     tcg_gen_or_tl(t1, t1, cpu_ca32);
4075     tcg_gen_or_tl(t0, t0, t1);
4076     tcg_gen_trunc_tl_i32(dst, t0);
4077 }
4078 #endif
4079 
4080 /* mfcr mfocrf */
4081 static void gen_mfcr(DisasContext *ctx)
4082 {
4083     uint32_t crm, crn;
4084 
4085     if (likely(ctx->opcode & 0x00100000)) {
4086         crm = CRM(ctx->opcode);
4087         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4088             crn = ctz32(crm);
4089             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4090             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4091                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4092         }
4093     } else {
4094         TCGv_i32 t0 = tcg_temp_new_i32();
4095         tcg_gen_mov_i32(t0, cpu_crf[0]);
4096         tcg_gen_shli_i32(t0, t0, 4);
4097         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4098         tcg_gen_shli_i32(t0, t0, 4);
4099         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4100         tcg_gen_shli_i32(t0, t0, 4);
4101         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4102         tcg_gen_shli_i32(t0, t0, 4);
4103         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4104         tcg_gen_shli_i32(t0, t0, 4);
4105         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4106         tcg_gen_shli_i32(t0, t0, 4);
4107         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4108         tcg_gen_shli_i32(t0, t0, 4);
4109         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4110         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4111     }
4112 }
4113 
4114 /* mfmsr */
4115 static void gen_mfmsr(DisasContext *ctx)
4116 {
4117     CHK_SV(ctx);
4118     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4119 }
4120 
4121 /* mfspr */
4122 static inline void gen_op_mfspr(DisasContext *ctx)
4123 {
4124     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4125     uint32_t sprn = SPR(ctx->opcode);
4126 
4127 #if defined(CONFIG_USER_ONLY)
4128     read_cb = ctx->spr_cb[sprn].uea_read;
4129 #else
4130     if (ctx->pr) {
4131         read_cb = ctx->spr_cb[sprn].uea_read;
4132     } else if (ctx->hv) {
4133         read_cb = ctx->spr_cb[sprn].hea_read;
4134     } else {
4135         read_cb = ctx->spr_cb[sprn].oea_read;
4136     }
4137 #endif
4138     if (likely(read_cb != NULL)) {
4139         if (likely(read_cb != SPR_NOACCESS)) {
4140             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4141         } else {
4142             /* Privilege exception */
4143             /*
4144              * This is a hack to avoid warnings when running Linux:
4145              * this OS breaks the PowerPC virtualisation model,
4146              * allowing userland application to read the PVR
4147              */
4148             if (sprn != SPR_PVR) {
4149                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4150                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4151                               ctx->cia);
4152             }
4153             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4154         }
4155     } else {
4156         /* ISA 2.07 defines these as no-ops */
4157         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4158             (sprn >= 808 && sprn <= 811)) {
4159             /* This is a nop */
4160             return;
4161         }
4162         /* Not defined */
4163         qemu_log_mask(LOG_GUEST_ERROR,
4164                       "Trying to read invalid spr %d (0x%03x) at "
4165                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4166 
4167         /*
4168          * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
4169          * generate a priv, a hv emu or a no-op
4170          */
4171         if (sprn & 0x10) {
4172             if (ctx->pr) {
4173                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4174             }
4175         } else {
4176             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4177                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4178             }
4179         }
4180     }
4181 }
4182 
4183 static void gen_mfspr(DisasContext *ctx)
4184 {
4185     gen_op_mfspr(ctx);
4186 }
4187 
4188 /* mftb */
4189 static void gen_mftb(DisasContext *ctx)
4190 {
4191     gen_op_mfspr(ctx);
4192 }
4193 
4194 /* mtcrf mtocrf*/
4195 static void gen_mtcrf(DisasContext *ctx)
4196 {
4197     uint32_t crm, crn;
4198 
4199     crm = CRM(ctx->opcode);
4200     if (likely((ctx->opcode & 0x00100000))) {
4201         if (crm && ((crm & (crm - 1)) == 0)) {
4202             TCGv_i32 temp = tcg_temp_new_i32();
4203             crn = ctz32(crm);
4204             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4205             tcg_gen_shri_i32(temp, temp, crn * 4);
4206             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4207         }
4208     } else {
4209         TCGv_i32 temp = tcg_temp_new_i32();
4210         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4211         for (crn = 0 ; crn < 8 ; crn++) {
4212             if (crm & (1 << crn)) {
4213                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4214                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4215             }
4216         }
4217     }
4218 }
4219 
4220 /* mtmsr */
4221 #if defined(TARGET_PPC64)
4222 static void gen_mtmsrd(DisasContext *ctx)
4223 {
4224     if (unlikely(!is_book3s_arch2x(ctx))) {
4225         gen_invalid(ctx);
4226         return;
4227     }
4228 
4229     CHK_SV(ctx);
4230 
4231 #if !defined(CONFIG_USER_ONLY)
4232     TCGv t0, t1;
4233     target_ulong mask;
4234 
4235     t0 = tcg_temp_new();
4236     t1 = tcg_temp_new();
4237 
4238     translator_io_start(&ctx->base);
4239 
4240     if (ctx->opcode & 0x00010000) {
4241         /* L=1 form only updates EE and RI */
4242         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4243     } else {
4244         /* mtmsrd does not alter HV, S, ME, or LE */
4245         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4246                  (1ULL << MSR_HV));
4247         /*
4248          * XXX: we need to update nip before the store if we enter
4249          *      power saving mode, we will exit the loop directly from
4250          *      ppc_store_msr
4251          */
4252         gen_update_nip(ctx, ctx->base.pc_next);
4253     }
4254 
4255     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4256     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4257     tcg_gen_or_tl(t0, t0, t1);
4258 
4259     gen_helper_store_msr(tcg_env, t0);
4260 
4261     /* Must stop the translation as machine state (may have) changed */
4262     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4263 #endif /* !defined(CONFIG_USER_ONLY) */
4264 }
4265 #endif /* defined(TARGET_PPC64) */
4266 
4267 static void gen_mtmsr(DisasContext *ctx)
4268 {
4269     CHK_SV(ctx);
4270 
4271 #if !defined(CONFIG_USER_ONLY)
4272     TCGv t0, t1;
4273     target_ulong mask = 0xFFFFFFFF;
4274 
4275     t0 = tcg_temp_new();
4276     t1 = tcg_temp_new();
4277 
4278     translator_io_start(&ctx->base);
4279     if (ctx->opcode & 0x00010000) {
4280         /* L=1 form only updates EE and RI */
4281         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4282     } else {
4283         if (likely(!(ctx->insns_flags2 & PPC2_PPE42))) {
4284             /* mtmsr does not alter S, ME, or LE */
4285             mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4286         }
4287 
4288         /*
4289          * XXX: we need to update nip before the store if we enter
4290          *      power saving mode, we will exit the loop directly from
4291          *      ppc_store_msr
4292          */
4293         gen_update_nip(ctx, ctx->base.pc_next);
4294     }
4295 
4296     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4297     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4298     tcg_gen_or_tl(t0, t0, t1);
4299 
4300     gen_helper_store_msr(tcg_env, t0);
4301 
4302     /* Must stop the translation as machine state (may have) changed */
4303     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4304 #endif
4305 }
4306 
4307 /* mtspr */
4308 static void gen_mtspr(DisasContext *ctx)
4309 {
4310     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4311     uint32_t sprn = SPR(ctx->opcode);
4312 
4313 #if defined(CONFIG_USER_ONLY)
4314     write_cb = ctx->spr_cb[sprn].uea_write;
4315 #else
4316     if (ctx->pr) {
4317         write_cb = ctx->spr_cb[sprn].uea_write;
4318     } else if (ctx->hv) {
4319         write_cb = ctx->spr_cb[sprn].hea_write;
4320     } else {
4321         write_cb = ctx->spr_cb[sprn].oea_write;
4322     }
4323 #endif
4324     if (likely(write_cb != NULL)) {
4325         if (likely(write_cb != SPR_NOACCESS)) {
4326             (*write_cb)(ctx, sprn, rS(ctx->opcode));
4327         } else {
4328             /* Privilege exception */
4329             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4330                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4331                           ctx->cia);
4332             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4333         }
4334     } else {
4335         /* ISA 2.07 defines these as no-ops */
4336         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4337             (sprn >= 808 && sprn <= 811)) {
4338             /* This is a nop */
4339             return;
4340         }
4341 
4342         /* Not defined */
4343         qemu_log_mask(LOG_GUEST_ERROR,
4344                       "Trying to write invalid spr %d (0x%03x) at "
4345                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4346 
4347 
4348         /*
4349          * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
4350          * generate a priv, a hv emu or a no-op
4351          */
4352         if (sprn & 0x10) {
4353             if (ctx->pr) {
4354                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4355             }
4356         } else {
4357             if (ctx->pr || sprn == 0) {
4358                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4359             }
4360         }
4361     }
4362 }
4363 
4364 #if defined(TARGET_PPC64)
4365 /* setb */
4366 static void gen_setb(DisasContext *ctx)
4367 {
4368     TCGv_i32 t0 = tcg_temp_new_i32();
4369     TCGv_i32 t8 = tcg_constant_i32(8);
4370     TCGv_i32 tm1 = tcg_constant_i32(-1);
4371     int crf = crfS(ctx->opcode);
4372 
4373     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4374     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4375     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4376 }
4377 #endif
4378 
4379 /***                         Cache management                              ***/
4380 
4381 /* dcbf */
4382 static void gen_dcbf(DisasContext *ctx)
4383 {
4384     /* XXX: specification says this is treated as a load by the MMU */
4385     TCGv t0;
4386     gen_set_access_type(ctx, ACCESS_CACHE);
4387     t0 = tcg_temp_new();
4388     gen_addr_reg_index(ctx, t0);
4389     gen_qemu_ld8u(ctx, t0, t0);
4390 }
4391 
4392 /* dcbfep (external PID dcbf) */
4393 static void gen_dcbfep(DisasContext *ctx)
4394 {
4395     /* XXX: specification says this is treated as a load by the MMU */
4396     TCGv t0;
4397     CHK_SV(ctx);
4398     gen_set_access_type(ctx, ACCESS_CACHE);
4399     t0 = tcg_temp_new();
4400     gen_addr_reg_index(ctx, t0);
4401     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4402 }
4403 
4404 /* dcbi (Supervisor only) */
4405 static void gen_dcbi(DisasContext *ctx)
4406 {
4407 #if defined(CONFIG_USER_ONLY)
4408     GEN_PRIV(ctx);
4409 #else
4410     TCGv EA, val;
4411 
4412     CHK_SV(ctx);
4413     EA = tcg_temp_new();
4414     gen_set_access_type(ctx, ACCESS_CACHE);
4415     gen_addr_reg_index(ctx, EA);
4416     val = tcg_temp_new();
4417     /* XXX: specification says this should be treated as a store by the MMU */
4418     gen_qemu_ld8u(ctx, val, EA);
4419     gen_qemu_st8(ctx, val, EA);
4420 #endif /* defined(CONFIG_USER_ONLY) */
4421 }
4422 
4423 /* dcdst */
4424 static void gen_dcbst(DisasContext *ctx)
4425 {
4426     /* XXX: specification say this is treated as a load by the MMU */
4427     TCGv t0;
4428     gen_set_access_type(ctx, ACCESS_CACHE);
4429     t0 = tcg_temp_new();
4430     gen_addr_reg_index(ctx, t0);
4431     gen_qemu_ld8u(ctx, t0, t0);
4432 }
4433 
4434 /* dcbstep (dcbstep External PID version) */
4435 static void gen_dcbstep(DisasContext *ctx)
4436 {
4437     /* XXX: specification say this is treated as a load by the MMU */
4438     TCGv t0;
4439     gen_set_access_type(ctx, ACCESS_CACHE);
4440     t0 = tcg_temp_new();
4441     gen_addr_reg_index(ctx, t0);
4442     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4443 }
4444 
4445 /* dcbt */
4446 static void gen_dcbt(DisasContext *ctx)
4447 {
4448     /*
4449      * interpreted as no-op
4450      * XXX: specification say this is treated as a load by the MMU but
4451      *      does not generate any exception
4452      */
4453 }
4454 
4455 /* dcbtep */
4456 static void gen_dcbtep(DisasContext *ctx)
4457 {
4458     /*
4459      * interpreted as no-op
4460      * XXX: specification say this is treated as a load by the MMU but
4461      *      does not generate any exception
4462      */
4463 }
4464 
4465 /* dcbtst */
4466 static void gen_dcbtst(DisasContext *ctx)
4467 {
4468     /*
4469      * interpreted as no-op
4470      * XXX: specification say this is treated as a load by the MMU but
4471      *      does not generate any exception
4472      */
4473 }
4474 
4475 /* dcbtstep */
4476 static void gen_dcbtstep(DisasContext *ctx)
4477 {
4478     /*
4479      * interpreted as no-op
4480      * XXX: specification say this is treated as a load by the MMU but
4481      *      does not generate any exception
4482      */
4483 }
4484 
4485 /* dcbtls */
4486 static void gen_dcbtls(DisasContext *ctx)
4487 {
4488     /* Always fails locking the cache */
4489     TCGv t0 = tcg_temp_new();
4490     gen_load_spr(t0, SPR_Exxx_L1CSR0);
4491     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4492     gen_store_spr(SPR_Exxx_L1CSR0, t0);
4493 }
4494 
4495 /* dcblc */
4496 static void gen_dcblc(DisasContext *ctx)
4497 {
4498     /*
4499      * interpreted as no-op
4500      */
4501 }
4502 
4503 /* dcbz */
4504 static void gen_dcbz(DisasContext *ctx)
4505 {
4506     TCGv tcgv_addr = tcg_temp_new();
4507 
4508     gen_set_access_type(ctx, ACCESS_CACHE);
4509     gen_addr_reg_index(ctx, tcgv_addr);
4510 
4511 #ifdef TARGET_PPC64
4512     if (ctx->excp_model == POWERPC_EXCP_970 && !(ctx->opcode & 0x00200000)) {
4513         gen_helper_dcbzl(tcg_env, tcgv_addr);
4514         return;
4515     }
4516 #endif
4517 
4518     gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(ctx->mem_idx));
4519 }
4520 
4521 /* dcbzep */
4522 static void gen_dcbzep(DisasContext *ctx)
4523 {
4524     TCGv tcgv_addr = tcg_temp_new();
4525 
4526     gen_set_access_type(ctx, ACCESS_CACHE);
4527     gen_addr_reg_index(ctx, tcgv_addr);
4528     gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(PPC_TLB_EPID_STORE));
4529 }
4530 
4531 /* dst / dstt */
4532 static void gen_dst(DisasContext *ctx)
4533 {
4534     if (rA(ctx->opcode) == 0) {
4535         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4536     } else {
4537         /* interpreted as no-op */
4538     }
4539 }
4540 
4541 /* dstst /dststt */
4542 static void gen_dstst(DisasContext *ctx)
4543 {
4544     if (rA(ctx->opcode) == 0) {
4545         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4546     } else {
4547         /* interpreted as no-op */
4548     }
4549 
4550 }
4551 
4552 /* dss / dssall */
4553 static void gen_dss(DisasContext *ctx)
4554 {
4555     /* interpreted as no-op */
4556 }
4557 
4558 /* icbi */
4559 static void gen_icbi(DisasContext *ctx)
4560 {
4561     TCGv t0;
4562     gen_set_access_type(ctx, ACCESS_CACHE);
4563     t0 = tcg_temp_new();
4564     gen_addr_reg_index(ctx, t0);
4565     gen_helper_icbi(tcg_env, t0);
4566 }
4567 
4568 /* icbiep */
4569 static void gen_icbiep(DisasContext *ctx)
4570 {
4571     TCGv t0;
4572     gen_set_access_type(ctx, ACCESS_CACHE);
4573     t0 = tcg_temp_new();
4574     gen_addr_reg_index(ctx, t0);
4575     gen_helper_icbiep(tcg_env, t0);
4576 }
4577 
4578 /* Optional: */
4579 /* dcba */
4580 static void gen_dcba(DisasContext *ctx)
4581 {
4582     /*
4583      * interpreted as no-op
4584      * XXX: specification say this is treated as a store by the MMU
4585      *      but does not generate any exception
4586      */
4587 }
4588 
4589 /***                    Segment register manipulation                      ***/
4590 /* Supervisor only: */
4591 
4592 /* mfsr */
4593 static void gen_mfsr(DisasContext *ctx)
4594 {
4595 #if defined(CONFIG_USER_ONLY)
4596     GEN_PRIV(ctx);
4597 #else
4598     TCGv t0;
4599 
4600     CHK_SV(ctx);
4601     t0 = tcg_constant_tl(SR(ctx->opcode));
4602     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4603 #endif /* defined(CONFIG_USER_ONLY) */
4604 }
4605 
4606 /* mfsrin */
4607 static void gen_mfsrin(DisasContext *ctx)
4608 {
4609 #if defined(CONFIG_USER_ONLY)
4610     GEN_PRIV(ctx);
4611 #else
4612     TCGv t0;
4613 
4614     CHK_SV(ctx);
4615     t0 = tcg_temp_new();
4616     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4617     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4618 #endif /* defined(CONFIG_USER_ONLY) */
4619 }
4620 
4621 /* mtsr */
4622 static void gen_mtsr(DisasContext *ctx)
4623 {
4624 #if defined(CONFIG_USER_ONLY)
4625     GEN_PRIV(ctx);
4626 #else
4627     TCGv t0;
4628 
4629     CHK_SV(ctx);
4630     t0 = tcg_constant_tl(SR(ctx->opcode));
4631     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4632 #endif /* defined(CONFIG_USER_ONLY) */
4633 }
4634 
4635 /* mtsrin */
4636 static void gen_mtsrin(DisasContext *ctx)
4637 {
4638 #if defined(CONFIG_USER_ONLY)
4639     GEN_PRIV(ctx);
4640 #else
4641     TCGv t0;
4642     CHK_SV(ctx);
4643 
4644     t0 = tcg_temp_new();
4645     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4646     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rD(ctx->opcode)]);
4647 #endif /* defined(CONFIG_USER_ONLY) */
4648 }
4649 
4650 #if defined(TARGET_PPC64)
4651 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4652 
4653 /* mfsr */
4654 static void gen_mfsr_64b(DisasContext *ctx)
4655 {
4656 #if defined(CONFIG_USER_ONLY)
4657     GEN_PRIV(ctx);
4658 #else
4659     TCGv t0;
4660 
4661     CHK_SV(ctx);
4662     t0 = tcg_constant_tl(SR(ctx->opcode));
4663     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4664 #endif /* defined(CONFIG_USER_ONLY) */
4665 }
4666 
4667 /* mfsrin */
4668 static void gen_mfsrin_64b(DisasContext *ctx)
4669 {
4670 #if defined(CONFIG_USER_ONLY)
4671     GEN_PRIV(ctx);
4672 #else
4673     TCGv t0;
4674 
4675     CHK_SV(ctx);
4676     t0 = tcg_temp_new();
4677     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4678     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4679 #endif /* defined(CONFIG_USER_ONLY) */
4680 }
4681 
4682 /* mtsr */
4683 static void gen_mtsr_64b(DisasContext *ctx)
4684 {
4685 #if defined(CONFIG_USER_ONLY)
4686     GEN_PRIV(ctx);
4687 #else
4688     TCGv t0;
4689 
4690     CHK_SV(ctx);
4691     t0 = tcg_constant_tl(SR(ctx->opcode));
4692     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4693 #endif /* defined(CONFIG_USER_ONLY) */
4694 }
4695 
4696 /* mtsrin */
4697 static void gen_mtsrin_64b(DisasContext *ctx)
4698 {
4699 #if defined(CONFIG_USER_ONLY)
4700     GEN_PRIV(ctx);
4701 #else
4702     TCGv t0;
4703 
4704     CHK_SV(ctx);
4705     t0 = tcg_temp_new();
4706     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4707     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4708 #endif /* defined(CONFIG_USER_ONLY) */
4709 }
4710 
4711 #endif /* defined(TARGET_PPC64) */
4712 
4713 /***                      Lookaside buffer management                      ***/
4714 /* Optional & supervisor only: */
4715 
4716 /* tlbia */
4717 static void gen_tlbia(DisasContext *ctx)
4718 {
4719 #if defined(CONFIG_USER_ONLY)
4720     GEN_PRIV(ctx);
4721 #else
4722     CHK_HV(ctx);
4723 
4724     gen_helper_tlbia(tcg_env);
4725 #endif  /* defined(CONFIG_USER_ONLY) */
4726 }
4727 
4728 /* tlbsync */
4729 static void gen_tlbsync(DisasContext *ctx)
4730 {
4731 #if defined(CONFIG_USER_ONLY)
4732     GEN_PRIV(ctx);
4733 #else
4734 
4735     if (ctx->gtse) {
4736         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
4737     } else {
4738         CHK_HV(ctx); /* Else hypervisor privileged */
4739     }
4740 
4741     /* BookS does both ptesync and tlbsync make tlbsync a nop for server */
4742     if (ctx->insns_flags & PPC_BOOKE) {
4743         gen_check_tlb_flush(ctx, true);
4744     }
4745 #endif /* defined(CONFIG_USER_ONLY) */
4746 }
4747 
4748 /***                              External control                         ***/
4749 /* Optional: */
4750 
4751 /* eciwx */
4752 static void gen_eciwx(DisasContext *ctx)
4753 {
4754     TCGv t0;
4755     /* Should check EAR[E] ! */
4756     gen_set_access_type(ctx, ACCESS_EXT);
4757     t0 = tcg_temp_new();
4758     gen_addr_reg_index(ctx, t0);
4759     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4760                        DEF_MEMOP(MO_UL | MO_ALIGN));
4761 }
4762 
4763 /* ecowx */
4764 static void gen_ecowx(DisasContext *ctx)
4765 {
4766     TCGv t0;
4767     /* Should check EAR[E] ! */
4768     gen_set_access_type(ctx, ACCESS_EXT);
4769     t0 = tcg_temp_new();
4770     gen_addr_reg_index(ctx, t0);
4771     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4772                        DEF_MEMOP(MO_UL | MO_ALIGN));
4773 }
4774 
4775 /* 602 - 603 - G2 TLB management */
4776 
4777 /* tlbld */
4778 static void gen_tlbld_6xx(DisasContext *ctx)
4779 {
4780 #if defined(CONFIG_USER_ONLY)
4781     GEN_PRIV(ctx);
4782 #else
4783     CHK_SV(ctx);
4784     gen_helper_6xx_tlbd(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4785 #endif /* defined(CONFIG_USER_ONLY) */
4786 }
4787 
4788 /* tlbli */
4789 static void gen_tlbli_6xx(DisasContext *ctx)
4790 {
4791 #if defined(CONFIG_USER_ONLY)
4792     GEN_PRIV(ctx);
4793 #else
4794     CHK_SV(ctx);
4795     gen_helper_6xx_tlbi(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4796 #endif /* defined(CONFIG_USER_ONLY) */
4797 }
4798 
4799 /* BookE specific instructions */
4800 
4801 /* XXX: not implemented on 440 ? */
4802 static void gen_mfapidi(DisasContext *ctx)
4803 {
4804     /* XXX: TODO */
4805     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4806 }
4807 
4808 /* XXX: not implemented on 440 ? */
4809 static void gen_tlbiva(DisasContext *ctx)
4810 {
4811 #if defined(CONFIG_USER_ONLY)
4812     GEN_PRIV(ctx);
4813 #else
4814     TCGv t0;
4815 
4816     CHK_SV(ctx);
4817     t0 = tcg_temp_new();
4818     gen_addr_reg_index(ctx, t0);
4819     gen_helper_tlbiva(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4820 #endif /* defined(CONFIG_USER_ONLY) */
4821 }
4822 
4823 /* All 405 MAC instructions are translated here */
4824 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
4825                                         int ra, int rb, int rt, int Rc)
4826 {
4827     TCGv t0, t1;
4828 
4829     t0 = tcg_temp_new();
4830     t1 = tcg_temp_new();
4831 
4832     switch (opc3 & 0x0D) {
4833     case 0x05:
4834         /* macchw    - macchw.    - macchwo   - macchwo.   */
4835         /* macchws   - macchws.   - macchwso  - macchwso.  */
4836         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
4837         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
4838         /* mulchw - mulchw. */
4839         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4840         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4841         tcg_gen_ext16s_tl(t1, t1);
4842         break;
4843     case 0x04:
4844         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
4845         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
4846         /* mulchwu - mulchwu. */
4847         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4848         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4849         tcg_gen_ext16u_tl(t1, t1);
4850         break;
4851     case 0x01:
4852         /* machhw    - machhw.    - machhwo   - machhwo.   */
4853         /* machhws   - machhws.   - machhwso  - machhwso.  */
4854         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
4855         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
4856         /* mulhhw - mulhhw. */
4857         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
4858         tcg_gen_ext16s_tl(t0, t0);
4859         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4860         tcg_gen_ext16s_tl(t1, t1);
4861         break;
4862     case 0x00:
4863         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
4864         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
4865         /* mulhhwu - mulhhwu. */
4866         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
4867         tcg_gen_ext16u_tl(t0, t0);
4868         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4869         tcg_gen_ext16u_tl(t1, t1);
4870         break;
4871     case 0x0D:
4872         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
4873         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
4874         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
4875         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
4876         /* mullhw - mullhw. */
4877         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4878         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
4879         break;
4880     case 0x0C:
4881         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
4882         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
4883         /* mullhwu - mullhwu. */
4884         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4885         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
4886         break;
4887     }
4888     if (opc2 & 0x04) {
4889         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
4890         tcg_gen_mul_tl(t1, t0, t1);
4891         if (opc2 & 0x02) {
4892             /* nmultiply-and-accumulate (0x0E) */
4893             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
4894         } else {
4895             /* multiply-and-accumulate (0x0C) */
4896             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
4897         }
4898 
4899         if (opc3 & 0x12) {
4900             /* Check overflow and/or saturate */
4901             TCGLabel *l1 = gen_new_label();
4902 
4903             if (opc3 & 0x10) {
4904                 /* Start with XER OV disabled, the most likely case */
4905                 tcg_gen_movi_tl(cpu_ov, 0);
4906             }
4907             if (opc3 & 0x01) {
4908                 /* Signed */
4909                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
4910                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
4911                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
4912                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
4913                 if (opc3 & 0x02) {
4914                     /* Saturate */
4915                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
4916                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
4917                 }
4918             } else {
4919                 /* Unsigned */
4920                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
4921                 if (opc3 & 0x02) {
4922                     /* Saturate */
4923                     tcg_gen_movi_tl(t0, UINT32_MAX);
4924                 }
4925             }
4926             if (opc3 & 0x10) {
4927                 /* Check overflow */
4928                 tcg_gen_movi_tl(cpu_ov, 1);
4929                 tcg_gen_movi_tl(cpu_so, 1);
4930             }
4931             gen_set_label(l1);
4932             tcg_gen_mov_tl(cpu_gpr[rt], t0);
4933         }
4934     } else {
4935         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
4936     }
4937     if (unlikely(Rc) != 0) {
4938         /* Update Rc0 */
4939         gen_set_Rc0(ctx, cpu_gpr[rt]);
4940     }
4941 }
4942 
4943 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
4944 static void glue(gen_, name)(DisasContext *ctx)                               \
4945 {                                                                             \
4946     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
4947                          rD(ctx->opcode), Rc(ctx->opcode));                   \
4948 }
4949 
4950 /* macchw    - macchw.    */
4951 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
4952 /* macchwo   - macchwo.   */
4953 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
4954 /* macchws   - macchws.   */
4955 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
4956 /* macchwso  - macchwso.  */
4957 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
4958 /* macchwsu  - macchwsu.  */
4959 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
4960 /* macchwsuo - macchwsuo. */
4961 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
4962 /* macchwu   - macchwu.   */
4963 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
4964 /* macchwuo  - macchwuo.  */
4965 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
4966 /* machhw    - machhw.    */
4967 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
4968 /* machhwo   - machhwo.   */
4969 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
4970 /* machhws   - machhws.   */
4971 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
4972 /* machhwso  - machhwso.  */
4973 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
4974 /* machhwsu  - machhwsu.  */
4975 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
4976 /* machhwsuo - machhwsuo. */
4977 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
4978 /* machhwu   - machhwu.   */
4979 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
4980 /* machhwuo  - machhwuo.  */
4981 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
4982 /* maclhw    - maclhw.    */
4983 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
4984 /* maclhwo   - maclhwo.   */
4985 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
4986 /* maclhws   - maclhws.   */
4987 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
4988 /* maclhwso  - maclhwso.  */
4989 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
4990 /* maclhwu   - maclhwu.   */
4991 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
4992 /* maclhwuo  - maclhwuo.  */
4993 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
4994 /* maclhwsu  - maclhwsu.  */
4995 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
4996 /* maclhwsuo - maclhwsuo. */
4997 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
4998 /* nmacchw   - nmacchw.   */
4999 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
5000 /* nmacchwo  - nmacchwo.  */
5001 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
5002 /* nmacchws  - nmacchws.  */
5003 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
5004 /* nmacchwso - nmacchwso. */
5005 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
5006 /* nmachhw   - nmachhw.   */
5007 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
5008 /* nmachhwo  - nmachhwo.  */
5009 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
5010 /* nmachhws  - nmachhws.  */
5011 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
5012 /* nmachhwso - nmachhwso. */
5013 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
5014 /* nmaclhw   - nmaclhw.   */
5015 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
5016 /* nmaclhwo  - nmaclhwo.  */
5017 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
5018 /* nmaclhws  - nmaclhws.  */
5019 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
5020 /* nmaclhwso - nmaclhwso. */
5021 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
5022 
5023 /* mulchw  - mulchw.  */
5024 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
5025 /* mulchwu - mulchwu. */
5026 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5027 /* mulhhw  - mulhhw.  */
5028 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5029 /* mulhhwu - mulhhwu. */
5030 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5031 /* mullhw  - mullhw.  */
5032 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5033 /* mullhwu - mullhwu. */
5034 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5035 
5036 /* mfdcr */
5037 static void gen_mfdcr(DisasContext *ctx)
5038 {
5039 #if defined(CONFIG_USER_ONLY)
5040     GEN_PRIV(ctx);
5041 #else
5042     TCGv dcrn;
5043 
5044     CHK_SV(ctx);
5045     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5046     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env, dcrn);
5047 #endif /* defined(CONFIG_USER_ONLY) */
5048 }
5049 
5050 /* mtdcr */
5051 static void gen_mtdcr(DisasContext *ctx)
5052 {
5053 #if defined(CONFIG_USER_ONLY)
5054     GEN_PRIV(ctx);
5055 #else
5056     TCGv dcrn;
5057 
5058     CHK_SV(ctx);
5059     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5060     gen_helper_store_dcr(tcg_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5061 #endif /* defined(CONFIG_USER_ONLY) */
5062 }
5063 
5064 /* mfdcrx */
5065 /* XXX: not implemented on 440 ? */
5066 static void gen_mfdcrx(DisasContext *ctx)
5067 {
5068 #if defined(CONFIG_USER_ONLY)
5069     GEN_PRIV(ctx);
5070 #else
5071     CHK_SV(ctx);
5072     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env,
5073                         cpu_gpr[rA(ctx->opcode)]);
5074     /* Note: Rc update flag set leads to undefined state of Rc0 */
5075 #endif /* defined(CONFIG_USER_ONLY) */
5076 }
5077 
5078 /* mtdcrx */
5079 /* XXX: not implemented on 440 ? */
5080 static void gen_mtdcrx(DisasContext *ctx)
5081 {
5082 #if defined(CONFIG_USER_ONLY)
5083     GEN_PRIV(ctx);
5084 #else
5085     CHK_SV(ctx);
5086     gen_helper_store_dcr(tcg_env, cpu_gpr[rA(ctx->opcode)],
5087                          cpu_gpr[rS(ctx->opcode)]);
5088     /* Note: Rc update flag set leads to undefined state of Rc0 */
5089 #endif /* defined(CONFIG_USER_ONLY) */
5090 }
5091 
5092 /* dccci */
5093 static void gen_dccci(DisasContext *ctx)
5094 {
5095     CHK_SV(ctx);
5096     /* interpreted as no-op */
5097 }
5098 
5099 /* dcread */
5100 static void gen_dcread(DisasContext *ctx)
5101 {
5102 #if defined(CONFIG_USER_ONLY)
5103     GEN_PRIV(ctx);
5104 #else
5105     TCGv EA, val;
5106 
5107     CHK_SV(ctx);
5108     gen_set_access_type(ctx, ACCESS_CACHE);
5109     EA = tcg_temp_new();
5110     gen_addr_reg_index(ctx, EA);
5111     val = tcg_temp_new();
5112     gen_qemu_ld32u(ctx, val, EA);
5113     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5114 #endif /* defined(CONFIG_USER_ONLY) */
5115 }
5116 
5117 /* icbt */
5118 static void gen_icbt_40x(DisasContext *ctx)
5119 {
5120     /*
5121      * interpreted as no-op
5122      * XXX: specification say this is treated as a load by the MMU but
5123      *      does not generate any exception
5124      */
5125 }
5126 
5127 /* iccci */
5128 static void gen_iccci(DisasContext *ctx)
5129 {
5130     CHK_SV(ctx);
5131     /* interpreted as no-op */
5132 }
5133 
5134 /* icread */
5135 static void gen_icread(DisasContext *ctx)
5136 {
5137     CHK_SV(ctx);
5138     /* interpreted as no-op */
5139 }
5140 
5141 /* rfci (supervisor only) */
5142 static void gen_rfci_40x(DisasContext *ctx)
5143 {
5144 #if defined(CONFIG_USER_ONLY)
5145     GEN_PRIV(ctx);
5146 #else
5147     CHK_SV(ctx);
5148     /* Restore CPU state */
5149     gen_helper_40x_rfci(tcg_env);
5150     ctx->base.is_jmp = DISAS_EXIT;
5151 #endif /* defined(CONFIG_USER_ONLY) */
5152 }
5153 
5154 static void gen_rfci(DisasContext *ctx)
5155 {
5156 #if defined(CONFIG_USER_ONLY)
5157     GEN_PRIV(ctx);
5158 #else
5159     CHK_SV(ctx);
5160     /* Restore CPU state */
5161     gen_helper_rfci(tcg_env);
5162     ctx->base.is_jmp = DISAS_EXIT;
5163 #endif /* defined(CONFIG_USER_ONLY) */
5164 }
5165 
5166 /* BookE specific */
5167 
5168 /* XXX: not implemented on 440 ? */
5169 static void gen_rfdi(DisasContext *ctx)
5170 {
5171 #if defined(CONFIG_USER_ONLY)
5172     GEN_PRIV(ctx);
5173 #else
5174     CHK_SV(ctx);
5175     /* Restore CPU state */
5176     gen_helper_rfdi(tcg_env);
5177     ctx->base.is_jmp = DISAS_EXIT;
5178 #endif /* defined(CONFIG_USER_ONLY) */
5179 }
5180 
5181 /* XXX: not implemented on 440 ? */
5182 static void gen_rfmci(DisasContext *ctx)
5183 {
5184 #if defined(CONFIG_USER_ONLY)
5185     GEN_PRIV(ctx);
5186 #else
5187     CHK_SV(ctx);
5188     /* Restore CPU state */
5189     gen_helper_rfmci(tcg_env);
5190     ctx->base.is_jmp = DISAS_EXIT;
5191 #endif /* defined(CONFIG_USER_ONLY) */
5192 }
5193 
5194 /* TLB management - PowerPC 405 implementation */
5195 
5196 /* tlbre */
5197 static void gen_tlbre_40x(DisasContext *ctx)
5198 {
5199 #if defined(CONFIG_USER_ONLY)
5200     GEN_PRIV(ctx);
5201 #else
5202     CHK_SV(ctx);
5203     switch (rB(ctx->opcode)) {
5204     case 0:
5205         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], tcg_env,
5206                                 cpu_gpr[rA(ctx->opcode)]);
5207         break;
5208     case 1:
5209         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], tcg_env,
5210                                 cpu_gpr[rA(ctx->opcode)]);
5211         break;
5212     default:
5213         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5214         break;
5215     }
5216 #endif /* defined(CONFIG_USER_ONLY) */
5217 }
5218 
5219 /* tlbsx - tlbsx. */
5220 static void gen_tlbsx_40x(DisasContext *ctx)
5221 {
5222 #if defined(CONFIG_USER_ONLY)
5223     GEN_PRIV(ctx);
5224 #else
5225     TCGv t0;
5226 
5227     CHK_SV(ctx);
5228     t0 = tcg_temp_new();
5229     gen_addr_reg_index(ctx, t0);
5230     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
5231     if (Rc(ctx->opcode)) {
5232         TCGLabel *l1 = gen_new_label();
5233         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5234         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5235         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5236         gen_set_label(l1);
5237     }
5238 #endif /* defined(CONFIG_USER_ONLY) */
5239 }
5240 
5241 /* tlbwe */
5242 static void gen_tlbwe_40x(DisasContext *ctx)
5243 {
5244 #if defined(CONFIG_USER_ONLY)
5245     GEN_PRIV(ctx);
5246 #else
5247     CHK_SV(ctx);
5248 
5249     switch (rB(ctx->opcode)) {
5250     case 0:
5251         gen_helper_4xx_tlbwe_hi(tcg_env, cpu_gpr[rA(ctx->opcode)],
5252                                 cpu_gpr[rS(ctx->opcode)]);
5253         break;
5254     case 1:
5255         gen_helper_4xx_tlbwe_lo(tcg_env, cpu_gpr[rA(ctx->opcode)],
5256                                 cpu_gpr[rS(ctx->opcode)]);
5257         break;
5258     default:
5259         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5260         break;
5261     }
5262 #endif /* defined(CONFIG_USER_ONLY) */
5263 }
5264 
5265 /* TLB management - PowerPC 440 implementation */
5266 
5267 /* tlbre */
5268 static void gen_tlbre_440(DisasContext *ctx)
5269 {
5270 #if defined(CONFIG_USER_ONLY)
5271     GEN_PRIV(ctx);
5272 #else
5273     CHK_SV(ctx);
5274 
5275     switch (rB(ctx->opcode)) {
5276     case 0:
5277     case 1:
5278     case 2:
5279         {
5280             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5281             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], tcg_env,
5282                                  t0, cpu_gpr[rA(ctx->opcode)]);
5283         }
5284         break;
5285     default:
5286         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5287         break;
5288     }
5289 #endif /* defined(CONFIG_USER_ONLY) */
5290 }
5291 
5292 /* tlbsx - tlbsx. */
5293 static void gen_tlbsx_440(DisasContext *ctx)
5294 {
5295 #if defined(CONFIG_USER_ONLY)
5296     GEN_PRIV(ctx);
5297 #else
5298     TCGv t0;
5299 
5300     CHK_SV(ctx);
5301     t0 = tcg_temp_new();
5302     gen_addr_reg_index(ctx, t0);
5303     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
5304     if (Rc(ctx->opcode)) {
5305         TCGLabel *l1 = gen_new_label();
5306         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5307         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5308         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5309         gen_set_label(l1);
5310     }
5311 #endif /* defined(CONFIG_USER_ONLY) */
5312 }
5313 
5314 /* tlbwe */
5315 static void gen_tlbwe_440(DisasContext *ctx)
5316 {
5317 #if defined(CONFIG_USER_ONLY)
5318     GEN_PRIV(ctx);
5319 #else
5320     CHK_SV(ctx);
5321     switch (rB(ctx->opcode)) {
5322     case 0:
5323     case 1:
5324     case 2:
5325         {
5326             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5327             gen_helper_440_tlbwe(tcg_env, t0, cpu_gpr[rA(ctx->opcode)],
5328                                  cpu_gpr[rS(ctx->opcode)]);
5329         }
5330         break;
5331     default:
5332         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5333         break;
5334     }
5335 #endif /* defined(CONFIG_USER_ONLY) */
5336 }
5337 
5338 /* TLB management - PowerPC BookE 2.06 implementation */
5339 
5340 /* tlbre */
5341 static void gen_tlbre_booke206(DisasContext *ctx)
5342 {
5343  #if defined(CONFIG_USER_ONLY)
5344     GEN_PRIV(ctx);
5345 #else
5346    CHK_SV(ctx);
5347     gen_helper_booke206_tlbre(tcg_env);
5348 #endif /* defined(CONFIG_USER_ONLY) */
5349 }
5350 
5351 /* tlbsx - tlbsx. */
5352 static void gen_tlbsx_booke206(DisasContext *ctx)
5353 {
5354 #if defined(CONFIG_USER_ONLY)
5355     GEN_PRIV(ctx);
5356 #else
5357     TCGv t0;
5358 
5359     CHK_SV(ctx);
5360     if (rA(ctx->opcode)) {
5361         t0 = tcg_temp_new();
5362         tcg_gen_add_tl(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5363     } else {
5364         t0 = cpu_gpr[rB(ctx->opcode)];
5365     }
5366     gen_helper_booke206_tlbsx(tcg_env, t0);
5367 #endif /* defined(CONFIG_USER_ONLY) */
5368 }
5369 
5370 /* tlbwe */
5371 static void gen_tlbwe_booke206(DisasContext *ctx)
5372 {
5373 #if defined(CONFIG_USER_ONLY)
5374     GEN_PRIV(ctx);
5375 #else
5376     CHK_SV(ctx);
5377     gen_helper_booke206_tlbwe(tcg_env);
5378 #endif /* defined(CONFIG_USER_ONLY) */
5379 }
5380 
5381 static void gen_tlbivax_booke206(DisasContext *ctx)
5382 {
5383 #if defined(CONFIG_USER_ONLY)
5384     GEN_PRIV(ctx);
5385 #else
5386     TCGv t0;
5387 
5388     CHK_SV(ctx);
5389     t0 = tcg_temp_new();
5390     gen_addr_reg_index(ctx, t0);
5391     gen_helper_booke206_tlbivax(tcg_env, t0);
5392 #endif /* defined(CONFIG_USER_ONLY) */
5393 }
5394 
5395 static void gen_tlbilx_booke206(DisasContext *ctx)
5396 {
5397 #if defined(CONFIG_USER_ONLY)
5398     GEN_PRIV(ctx);
5399 #else
5400     TCGv t0;
5401 
5402     CHK_SV(ctx);
5403     t0 = tcg_temp_new();
5404     gen_addr_reg_index(ctx, t0);
5405 
5406     switch ((ctx->opcode >> 21) & 0x3) {
5407     case 0:
5408         gen_helper_booke206_tlbilx0(tcg_env, t0);
5409         break;
5410     case 1:
5411         gen_helper_booke206_tlbilx1(tcg_env, t0);
5412         break;
5413     case 3:
5414         gen_helper_booke206_tlbilx3(tcg_env, t0);
5415         break;
5416     default:
5417         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5418         break;
5419     }
5420 #endif /* defined(CONFIG_USER_ONLY) */
5421 }
5422 
5423 /* wrtee */
5424 static void gen_wrtee(DisasContext *ctx)
5425 {
5426 #if defined(CONFIG_USER_ONLY)
5427     GEN_PRIV(ctx);
5428 #else
5429     TCGv t0;
5430 
5431     CHK_SV(ctx);
5432     t0 = tcg_temp_new();
5433     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5434     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5435     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5436     gen_ppc_maybe_interrupt(ctx);
5437     /*
5438      * Stop translation to have a chance to raise an exception if we
5439      * just set msr_ee to 1
5440      */
5441     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5442 #endif /* defined(CONFIG_USER_ONLY) */
5443 }
5444 
5445 /* wrteei */
5446 static void gen_wrteei(DisasContext *ctx)
5447 {
5448 #if defined(CONFIG_USER_ONLY)
5449     GEN_PRIV(ctx);
5450 #else
5451     CHK_SV(ctx);
5452     if (ctx->opcode & 0x00008000) {
5453         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
5454         gen_ppc_maybe_interrupt(ctx);
5455         /* Stop translation to have a chance to raise an exception */
5456         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5457     } else {
5458         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5459     }
5460 #endif /* defined(CONFIG_USER_ONLY) */
5461 }
5462 
5463 /* PowerPC 440 specific instructions */
5464 
5465 /* dlmzb */
5466 static void gen_dlmzb(DisasContext *ctx)
5467 {
5468     TCGv_i32 t0 = tcg_constant_i32(Rc(ctx->opcode));
5469     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], tcg_env,
5470                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
5471 }
5472 
5473 /* icbt */
5474 static void gen_icbt_440(DisasContext *ctx)
5475 {
5476     /*
5477      * interpreted as no-op
5478      * XXX: specification say this is treated as a load by the MMU but
5479      *      does not generate any exception
5480      */
5481 }
5482 
5483 static void gen_tbegin(DisasContext *ctx)
5484 {
5485     if (unlikely(!ctx->tm_enabled)) {
5486         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5487         return;
5488     }
5489     gen_helper_tbegin(tcg_env);
5490 }
5491 
5492 #define GEN_TM_NOOP(name)                                      \
5493 static inline void gen_##name(DisasContext *ctx)               \
5494 {                                                              \
5495     if (unlikely(!ctx->tm_enabled)) {                          \
5496         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5497         return;                                                \
5498     }                                                          \
5499     /*                                                         \
5500      * Because tbegin always fails in QEMU, these user         \
5501      * space instructions all have a simple implementation:    \
5502      *                                                         \
5503      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
5504      *           = 0b0 || 0b00    || 0b0                       \
5505      */                                                        \
5506     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5507 }
5508 
5509 GEN_TM_NOOP(tend);
5510 GEN_TM_NOOP(tabort);
5511 GEN_TM_NOOP(tabortwc);
5512 GEN_TM_NOOP(tabortwci);
5513 GEN_TM_NOOP(tabortdc);
5514 GEN_TM_NOOP(tabortdci);
5515 GEN_TM_NOOP(tsr);
5516 
5517 static inline void gen_cp_abort(DisasContext *ctx)
5518 {
5519     /* Do Nothing */
5520 }
5521 
5522 #define GEN_CP_PASTE_NOOP(name)                           \
5523 static inline void gen_##name(DisasContext *ctx)          \
5524 {                                                         \
5525     /*                                                    \
5526      * Generate invalid exception until we have an        \
5527      * implementation of the copy paste facility          \
5528      */                                                   \
5529     gen_invalid(ctx);                                     \
5530 }
5531 
5532 GEN_CP_PASTE_NOOP(copy)
5533 GEN_CP_PASTE_NOOP(paste)
5534 
5535 static void gen_tcheck(DisasContext *ctx)
5536 {
5537     if (unlikely(!ctx->tm_enabled)) {
5538         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5539         return;
5540     }
5541     /*
5542      * Because tbegin always fails, the tcheck implementation is
5543      * simple:
5544      *
5545      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
5546      *         = 0b1 || 0b00 || 0b0
5547      */
5548     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
5549 }
5550 
5551 #if defined(CONFIG_USER_ONLY)
5552 #define GEN_TM_PRIV_NOOP(name)                                 \
5553 static inline void gen_##name(DisasContext *ctx)               \
5554 {                                                              \
5555     gen_priv_opc(ctx);                                         \
5556 }
5557 
5558 #else
5559 
5560 #define GEN_TM_PRIV_NOOP(name)                                 \
5561 static inline void gen_##name(DisasContext *ctx)               \
5562 {                                                              \
5563     CHK_SV(ctx);                                               \
5564     if (unlikely(!ctx->tm_enabled)) {                          \
5565         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5566         return;                                                \
5567     }                                                          \
5568     /*                                                         \
5569      * Because tbegin always fails, the implementation is      \
5570      * simple:                                                 \
5571      *                                                         \
5572      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
5573      *         = 0b0 || 0b00 | 0b0                             \
5574      */                                                        \
5575     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5576 }
5577 
5578 #endif
5579 
5580 GEN_TM_PRIV_NOOP(treclaim);
5581 GEN_TM_PRIV_NOOP(trechkpt);
5582 
5583 static inline void get_fpr(TCGv_i64 dst, int regno)
5584 {
5585     tcg_gen_ld_i64(dst, tcg_env, fpr_offset(regno));
5586 }
5587 
5588 static inline void set_fpr(int regno, TCGv_i64 src)
5589 {
5590     tcg_gen_st_i64(src, tcg_env, fpr_offset(regno));
5591     /*
5592      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
5593      * corresponding to the target FPR was undefined. However,
5594      * most (if not all) real hardware were setting the result to 0.
5595      * Starting at ISA v3.1, the result for doubleword 1 is now defined
5596      * to be 0.
5597      */
5598     tcg_gen_st_i64(tcg_constant_i64(0), tcg_env, vsr64_offset(regno, false));
5599 }
5600 
5601 /*
5602  * Helpers for decodetree used by !function for decoding arguments.
5603  */
5604 static int times_2(DisasContext *ctx, int x)
5605 {
5606     return x * 2;
5607 }
5608 
5609 static int times_4(DisasContext *ctx, int x)
5610 {
5611     return x * 4;
5612 }
5613 
5614 static int times_16(DisasContext *ctx, int x)
5615 {
5616     return x * 16;
5617 }
5618 
5619 static int64_t dw_compose_ea(DisasContext *ctx, int x)
5620 {
5621     return deposit64(0xfffffffffffffe00, 3, 6, x);
5622 }
5623 
5624 /*
5625  * Helpers for trans_* functions to check for specific insns flags.
5626  * Use token pasting to ensure that we use the proper flag with the
5627  * proper variable.
5628  */
5629 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
5630     do {                                                \
5631         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
5632             return false;                               \
5633         }                                               \
5634     } while (0)
5635 
5636 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
5637     do {                                                \
5638         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
5639             return false;                               \
5640         }                                               \
5641     } while (0)
5642 
5643 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
5644 #if TARGET_LONG_BITS == 32
5645 # define REQUIRE_64BIT(CTX)  return false
5646 #else
5647 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
5648 #endif
5649 
5650 #define REQUIRE_VECTOR(CTX)                             \
5651     do {                                                \
5652         if (unlikely(!(CTX)->altivec_enabled)) {        \
5653             gen_exception((CTX), POWERPC_EXCP_VPU);     \
5654             return true;                                \
5655         }                                               \
5656     } while (0)
5657 
5658 #define REQUIRE_VSX(CTX)                                \
5659     do {                                                \
5660         if (unlikely(!(CTX)->vsx_enabled)) {            \
5661             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
5662             return true;                                \
5663         }                                               \
5664     } while (0)
5665 
5666 #define REQUIRE_FPU(ctx)                                \
5667     do {                                                \
5668         if (unlikely(!(ctx)->fpu_enabled)) {            \
5669             gen_exception((ctx), POWERPC_EXCP_FPU);     \
5670             return true;                                \
5671         }                                               \
5672     } while (0)
5673 
5674 #if !defined(CONFIG_USER_ONLY)
5675 #define REQUIRE_SV(CTX)             \
5676     do {                            \
5677         if (unlikely((CTX)->pr)) {  \
5678             gen_priv_opc(CTX);      \
5679             return true;            \
5680         }                           \
5681     } while (0)
5682 
5683 #define REQUIRE_HV(CTX)                             \
5684     do {                                            \
5685         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
5686             gen_priv_opc(CTX);                      \
5687             return true;                            \
5688         }                                           \
5689     } while (0)
5690 #else
5691 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5692 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5693 #endif
5694 
5695 /*
5696  * Helpers for implementing sets of trans_* functions.
5697  * Defer the implementation of NAME to FUNC, with optional extra arguments.
5698  */
5699 #define TRANS(NAME, FUNC, ...) \
5700     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5701     { return FUNC(ctx, a, __VA_ARGS__); }
5702 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
5703     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5704     {                                                          \
5705         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
5706         return FUNC(ctx, a, __VA_ARGS__);                      \
5707     }
5708 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5709     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5710     {                                                          \
5711         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5712         return FUNC(ctx, a, __VA_ARGS__);                      \
5713     }
5714 
5715 #define TRANS64(NAME, FUNC, ...) \
5716     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5717     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
5718 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5719     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5720     {                                                          \
5721         REQUIRE_64BIT(ctx);                                    \
5722         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5723         return FUNC(ctx, a, __VA_ARGS__);                      \
5724     }
5725 
5726 /* TODO: More TRANS* helpers for extra insn_flags checks. */
5727 
5728 
5729 #include "decode-insn32.c.inc"
5730 #include "decode-insn64.c.inc"
5731 #include "power8-pmu-regs.c.inc"
5732 
5733 /*
5734  * Incorporate CIA into the constant when R=1.
5735  * Validate that when R=1, RA=0.
5736  */
5737 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
5738 {
5739     d->rt = a->rt;
5740     d->ra = a->ra;
5741     d->si = a->si;
5742     if (a->r) {
5743         if (unlikely(a->ra != 0)) {
5744             gen_invalid(ctx);
5745             return false;
5746         }
5747         d->si += ctx->cia;
5748     }
5749     return true;
5750 }
5751 
5752 #include "translate/fixedpoint-impl.c.inc"
5753 
5754 #include "translate/fp-impl.c.inc"
5755 
5756 #include "translate/vmx-impl.c.inc"
5757 
5758 #include "translate/vsx-impl.c.inc"
5759 
5760 #include "translate/dfp-impl.c.inc"
5761 
5762 #include "translate/spe-impl.c.inc"
5763 
5764 #include "translate/branch-impl.c.inc"
5765 
5766 #include "translate/processor-ctrl-impl.c.inc"
5767 
5768 #include "translate/storage-ctrl-impl.c.inc"
5769 
5770 #include "translate/misc-impl.c.inc"
5771 
5772 #include "translate/bhrb-impl.c.inc"
5773 
5774 #include "translate/ppe-impl.c.inc"
5775 
5776 /* Handles lfdp */
5777 static void gen_dform39(DisasContext *ctx)
5778 {
5779     if ((ctx->opcode & 0x3) == 0) {
5780         if (ctx->insns_flags2 & PPC2_ISA205) {
5781             return gen_lfdp(ctx);
5782         }
5783     }
5784     return gen_invalid(ctx);
5785 }
5786 
5787 /* Handles stfdp */
5788 static void gen_dform3D(DisasContext *ctx)
5789 {
5790     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
5791         /* stfdp */
5792         if (ctx->insns_flags2 & PPC2_ISA205) {
5793             return gen_stfdp(ctx);
5794         }
5795     }
5796     return gen_invalid(ctx);
5797 }
5798 
5799 #if defined(TARGET_PPC64)
5800 /* brd */
5801 static void gen_brd(DisasContext *ctx)
5802 {
5803     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5804 }
5805 
5806 /* brw */
5807 static void gen_brw(DisasContext *ctx)
5808 {
5809     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5810     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
5811 
5812 }
5813 
5814 /* brh */
5815 static void gen_brh(DisasContext *ctx)
5816 {
5817     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
5818     TCGv_i64 t1 = tcg_temp_new_i64();
5819     TCGv_i64 t2 = tcg_temp_new_i64();
5820 
5821     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
5822     tcg_gen_and_i64(t2, t1, mask);
5823     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
5824     tcg_gen_shli_i64(t1, t1, 8);
5825     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
5826 }
5827 #endif
5828 
5829 static opcode_t opcodes[] = {
5830 #if defined(TARGET_PPC64)
5831 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
5832 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
5833 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
5834 #endif
5835 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
5836 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
5837 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5838 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
5839 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5840 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5841 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5842 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
5843 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
5844 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
5845 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
5846 #if defined(TARGET_PPC64)
5847 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
5848 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
5849 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
5850 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
5851 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
5852 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
5853                PPC_NONE, PPC2_ISA300),
5854 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
5855                PPC_NONE, PPC2_ISA300),
5856 #endif
5857 /* handles lfdp, lxsd, lxssp */
5858 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5859 /* handles stfdp, stxsd, stxssp */
5860 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5861 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5862 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5863 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
5864 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
5865 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
5866 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
5867 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
5868 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5869 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5870 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
5871 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
5872 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
5873 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5874 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5875 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
5876 #if defined(TARGET_PPC64)
5877 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
5878 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
5879 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
5880 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
5881 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
5882 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
5883 #endif
5884 /* ISA v3.0 changed the extended opcode from 62 to 30 */
5885 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
5886 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
5887 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5888 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5889 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
5890 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
5891 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
5892 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
5893 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
5894 #if defined(TARGET_PPC64)
5895 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
5896 #if !defined(CONFIG_USER_ONLY)
5897 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5898 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5899 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5900 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
5901 #endif
5902 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5903 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5904 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5905 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5906 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5907 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
5908 #endif
5909 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5910 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
5911 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
5912 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
5913 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
5914 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
5915 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
5916 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
5917 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
5918 #if defined(TARGET_PPC64)
5919 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
5920 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
5921 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
5922 #endif
5923 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
5924 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
5925 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
5926 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5927 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
5928 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
5929 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5930 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
5931 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5932 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
5933 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5934 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5935 GEN_HANDLER_E(dcblc, 0x1F, 0x06, 0x0c, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5936 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
5937 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5938 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
5939 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
5940 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
5941 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
5942 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5943 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
5944 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
5945 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
5946 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
5947 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
5948 #if defined(TARGET_PPC64)
5949 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
5950 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
5951              PPC_SEGMENT_64B),
5952 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
5953 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
5954              PPC_SEGMENT_64B),
5955 #endif
5956 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
5957 /*
5958  * XXX Those instructions will need to be handled differently for
5959  * different ISA versions
5960  */
5961 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
5962 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
5963 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
5964 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
5965 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
5966 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
5967 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
5968 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
5969 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
5970 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
5971 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
5972 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
5973 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
5974 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
5975 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
5976 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
5977 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
5978 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
5979 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
5980 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
5981 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
5982 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
5983 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
5984 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
5985 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
5986 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
5987 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
5988                PPC_NONE, PPC2_BOOKE206),
5989 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
5990                PPC_NONE, PPC2_BOOKE206),
5991 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
5992                PPC_NONE, PPC2_BOOKE206),
5993 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
5994                PPC_NONE, PPC2_BOOKE206),
5995 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
5996                PPC_NONE, PPC2_BOOKE206),
5997 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
5998 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
5999 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6000 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6001                PPC_BOOKE, PPC2_BOOKE206),
6002 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6003              PPC_440_SPEC),
6004 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6005 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6006 
6007 #if defined(TARGET_PPC64)
6008 #undef GEN_PPC64_R2
6009 #undef GEN_PPC64_R4
6010 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
6011 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6012 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6013              PPC_64B)
6014 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
6015 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6016 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
6017              PPC_64B),                                                        \
6018 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6019              PPC_64B),                                                        \
6020 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
6021              PPC_64B)
6022 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
6023 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
6024 GEN_PPC64_R4(rldic, 0x1E, 0x04),
6025 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
6026 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
6027 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
6028 #endif
6029 
6030 #undef GEN_LDX_E
6031 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
6032 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6033 
6034 #if defined(TARGET_PPC64)
6035 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
6036 
6037 /* HV/P7 and later only */
6038 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
6039 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
6040 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
6041 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
6042 #endif
6043 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
6044 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6045 
6046 /* External PID based load */
6047 #undef GEN_LDEPX
6048 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
6049 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6050               0x00000001, PPC_NONE, PPC2_BOOKE206),
6051 
6052 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
6053 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
6054 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
6055 #if defined(TARGET_PPC64)
6056 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
6057 #endif
6058 
6059 #undef GEN_STX_E
6060 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
6061 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
6062 
6063 #if defined(TARGET_PPC64)
6064 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6065 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6066 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6067 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6068 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6069 #endif
6070 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6071 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6072 
6073 #undef GEN_STEPX
6074 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
6075 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6076               0x00000001, PPC_NONE, PPC2_BOOKE206),
6077 
6078 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
6079 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
6080 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
6081 #if defined(TARGET_PPC64)
6082 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
6083 #endif
6084 
6085 #undef GEN_CRLOGIC
6086 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
6087 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6088 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6089 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6090 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6091 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6092 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6093 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6094 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6095 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6096 
6097 #undef GEN_MAC_HANDLER
6098 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
6099 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6100 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6101 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6102 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6103 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6104 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6105 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6106 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6107 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6108 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6109 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6110 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6111 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6112 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6113 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6114 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6115 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6116 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6117 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6118 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6119 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6120 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6121 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6122 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6123 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6124 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6125 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6126 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6127 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6128 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6129 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6130 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6131 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6132 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6133 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6134 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6135 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6136 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6137 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6138 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6139 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6140 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6141 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6142 
6143 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6144                PPC_NONE, PPC2_TM),
6145 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
6146                PPC_NONE, PPC2_TM),
6147 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6148                PPC_NONE, PPC2_TM),
6149 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6150                PPC_NONE, PPC2_TM),
6151 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6152                PPC_NONE, PPC2_TM),
6153 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6154                PPC_NONE, PPC2_TM),
6155 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6156                PPC_NONE, PPC2_TM),
6157 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6158                PPC_NONE, PPC2_TM),
6159 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6160                PPC_NONE, PPC2_TM),
6161 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6162                PPC_NONE, PPC2_TM),
6163 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6164                PPC_NONE, PPC2_TM),
6165 
6166 #include "translate/fp-ops.c.inc"
6167 
6168 #include "translate/vmx-ops.c.inc"
6169 
6170 #include "translate/vsx-ops.c.inc"
6171 
6172 #include "translate/spe-ops.c.inc"
6173 };
6174 
6175 /*****************************************************************************/
6176 /* Opcode types */
6177 enum {
6178     PPC_DIRECT   = 0, /* Opcode routine        */
6179     PPC_INDIRECT = 1, /* Indirect opcode table */
6180 };
6181 
6182 #define PPC_OPCODE_MASK 0x3
6183 
6184 static inline int is_indirect_opcode(void *handler)
6185 {
6186     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
6187 }
6188 
6189 static inline opc_handler_t **ind_table(void *handler)
6190 {
6191     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
6192 }
6193 
6194 /* Instruction table creation */
6195 /* Opcodes tables creation */
6196 static void fill_new_table(opc_handler_t **table, int len)
6197 {
6198     int i;
6199 
6200     for (i = 0; i < len; i++) {
6201         table[i] = &invalid_handler;
6202     }
6203 }
6204 
6205 static int create_new_table(opc_handler_t **table, unsigned char idx)
6206 {
6207     opc_handler_t **tmp;
6208 
6209     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
6210     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
6211     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
6212 
6213     return 0;
6214 }
6215 
6216 static int insert_in_table(opc_handler_t **table, unsigned char idx,
6217                             opc_handler_t *handler)
6218 {
6219     if (table[idx] != &invalid_handler) {
6220         return -1;
6221     }
6222     table[idx] = handler;
6223 
6224     return 0;
6225 }
6226 
6227 static int register_direct_insn(opc_handler_t **ppc_opcodes,
6228                                 unsigned char idx, opc_handler_t *handler)
6229 {
6230     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
6231         printf("*** ERROR: opcode %02x already assigned in main "
6232                "opcode table\n", idx);
6233         return -1;
6234     }
6235 
6236     return 0;
6237 }
6238 
6239 static int register_ind_in_table(opc_handler_t **table,
6240                                  unsigned char idx1, unsigned char idx2,
6241                                  opc_handler_t *handler)
6242 {
6243     if (table[idx1] == &invalid_handler) {
6244         if (create_new_table(table, idx1) < 0) {
6245             printf("*** ERROR: unable to create indirect table "
6246                    "idx=%02x\n", idx1);
6247             return -1;
6248         }
6249     } else {
6250         if (!is_indirect_opcode(table[idx1])) {
6251             printf("*** ERROR: idx %02x already assigned to a direct "
6252                    "opcode\n", idx1);
6253             return -1;
6254         }
6255     }
6256     if (handler != NULL &&
6257         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
6258         printf("*** ERROR: opcode %02x already assigned in "
6259                "opcode table %02x\n", idx2, idx1);
6260         return -1;
6261     }
6262 
6263     return 0;
6264 }
6265 
6266 static int register_ind_insn(opc_handler_t **ppc_opcodes,
6267                              unsigned char idx1, unsigned char idx2,
6268                              opc_handler_t *handler)
6269 {
6270     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
6271 }
6272 
6273 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
6274                                 unsigned char idx1, unsigned char idx2,
6275                                 unsigned char idx3, opc_handler_t *handler)
6276 {
6277     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6278         printf("*** ERROR: unable to join indirect table idx "
6279                "[%02x-%02x]\n", idx1, idx2);
6280         return -1;
6281     }
6282     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
6283                               handler) < 0) {
6284         printf("*** ERROR: unable to insert opcode "
6285                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6286         return -1;
6287     }
6288 
6289     return 0;
6290 }
6291 
6292 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
6293                                  unsigned char idx1, unsigned char idx2,
6294                                  unsigned char idx3, unsigned char idx4,
6295                                  opc_handler_t *handler)
6296 {
6297     opc_handler_t **table;
6298 
6299     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6300         printf("*** ERROR: unable to join indirect table idx "
6301                "[%02x-%02x]\n", idx1, idx2);
6302         return -1;
6303     }
6304     table = ind_table(ppc_opcodes[idx1]);
6305     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
6306         printf("*** ERROR: unable to join 2nd-level indirect table idx "
6307                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6308         return -1;
6309     }
6310     table = ind_table(table[idx2]);
6311     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
6312         printf("*** ERROR: unable to insert opcode "
6313                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
6314         return -1;
6315     }
6316     return 0;
6317 }
6318 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
6319 {
6320     if (insn->opc2 != 0xFF) {
6321         if (insn->opc3 != 0xFF) {
6322             if (insn->opc4 != 0xFF) {
6323                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6324                                           insn->opc3, insn->opc4,
6325                                           &insn->handler) < 0) {
6326                     return -1;
6327                 }
6328             } else {
6329                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6330                                          insn->opc3, &insn->handler) < 0) {
6331                     return -1;
6332                 }
6333             }
6334         } else {
6335             if (register_ind_insn(ppc_opcodes, insn->opc1,
6336                                   insn->opc2, &insn->handler) < 0) {
6337                 return -1;
6338             }
6339         }
6340     } else {
6341         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
6342             return -1;
6343         }
6344     }
6345 
6346     return 0;
6347 }
6348 
6349 static int test_opcode_table(opc_handler_t **table, int len)
6350 {
6351     int i, count, tmp;
6352 
6353     for (i = 0, count = 0; i < len; i++) {
6354         /* Consistency fixup */
6355         if (table[i] == NULL) {
6356             table[i] = &invalid_handler;
6357         }
6358         if (table[i] != &invalid_handler) {
6359             if (is_indirect_opcode(table[i])) {
6360                 tmp = test_opcode_table(ind_table(table[i]),
6361                     PPC_CPU_INDIRECT_OPCODES_LEN);
6362                 if (tmp == 0) {
6363                     g_free(table[i]);
6364                     table[i] = &invalid_handler;
6365                 } else {
6366                     count++;
6367                 }
6368             } else {
6369                 count++;
6370             }
6371         }
6372     }
6373 
6374     return count;
6375 }
6376 
6377 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
6378 {
6379     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
6380         printf("*** WARNING: no opcode defined !\n");
6381     }
6382 }
6383 
6384 /*****************************************************************************/
6385 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
6386 {
6387     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
6388     opcode_t *opc;
6389 
6390     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
6391     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
6392         if (((opc->handler.type & pcc->insns_flags) != 0) ||
6393             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
6394             if (register_insn(cpu->opcodes, opc) < 0) {
6395                 error_setg(errp, "ERROR initializing PowerPC instruction "
6396                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
6397                            opc->opc3);
6398                 return;
6399             }
6400         }
6401     }
6402     fix_opcode_tables(cpu->opcodes);
6403     fflush(stdout);
6404     fflush(stderr);
6405 }
6406 
6407 void destroy_ppc_opcodes(PowerPCCPU *cpu)
6408 {
6409     opc_handler_t **table, **table_2;
6410     int i, j, k;
6411 
6412     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
6413         if (cpu->opcodes[i] == &invalid_handler) {
6414             continue;
6415         }
6416         if (is_indirect_opcode(cpu->opcodes[i])) {
6417             table = ind_table(cpu->opcodes[i]);
6418             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
6419                 if (table[j] == &invalid_handler) {
6420                     continue;
6421                 }
6422                 if (is_indirect_opcode(table[j])) {
6423                     table_2 = ind_table(table[j]);
6424                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
6425                         if (table_2[k] != &invalid_handler &&
6426                             is_indirect_opcode(table_2[k])) {
6427                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
6428                                                      ~PPC_INDIRECT));
6429                         }
6430                     }
6431                     g_free((opc_handler_t *)((uintptr_t)table[j] &
6432                                              ~PPC_INDIRECT));
6433                 }
6434             }
6435             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
6436                 ~PPC_INDIRECT));
6437         }
6438     }
6439 }
6440 
6441 int ppc_fixup_cpu(PowerPCCPU *cpu)
6442 {
6443     CPUPPCState *env = &cpu->env;
6444 
6445     /*
6446      * TCG doesn't (yet) emulate some groups of instructions that are
6447      * implemented on some otherwise supported CPUs (e.g. VSX and
6448      * decimal floating point instructions on POWER7).  We remove
6449      * unsupported instruction groups from the cpu state's instruction
6450      * masks and hope the guest can cope.  For at least the pseries
6451      * machine, the unavailability of these instructions can be
6452      * advertised to the guest via the device tree.
6453      */
6454     if ((env->insns_flags & ~PPC_TCG_INSNS)
6455         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
6456         warn_report("Disabling some instructions which are not "
6457                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
6458                     env->insns_flags & ~PPC_TCG_INSNS,
6459                     env->insns_flags2 & ~PPC_TCG_INSNS2);
6460     }
6461     env->insns_flags &= PPC_TCG_INSNS;
6462     env->insns_flags2 &= PPC_TCG_INSNS2;
6463     return 0;
6464 }
6465 
6466 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
6467 {
6468     opc_handler_t **table, *handler;
6469     uint32_t inval;
6470 
6471     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
6472               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6473               ctx->le_mode ? "little" : "big");
6474 
6475     table = cpu->opcodes;
6476     handler = table[opc1(insn)];
6477     if (is_indirect_opcode(handler)) {
6478         table = ind_table(handler);
6479         handler = table[opc2(insn)];
6480         if (is_indirect_opcode(handler)) {
6481             table = ind_table(handler);
6482             handler = table[opc3(insn)];
6483             if (is_indirect_opcode(handler)) {
6484                 table = ind_table(handler);
6485                 handler = table[opc4(insn)];
6486             }
6487         }
6488     }
6489 
6490     /* Is opcode *REALLY* valid ? */
6491     if (unlikely(handler->handler == &gen_invalid)) {
6492         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
6493                       "%02x - %02x - %02x - %02x (%08x) "
6494                       TARGET_FMT_lx "\n",
6495                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6496                       insn, ctx->cia);
6497         return false;
6498     }
6499 
6500     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
6501                  && Rc(insn))) {
6502         inval = handler->inval2;
6503     } else {
6504         inval = handler->inval1;
6505     }
6506 
6507     if (unlikely((insn & inval) != 0)) {
6508         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
6509                       "%02x - %02x - %02x - %02x (%08x) "
6510                       TARGET_FMT_lx "\n", insn & inval,
6511                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6512                       insn, ctx->cia);
6513         return false;
6514     }
6515 
6516     handler->handler(ctx);
6517     return true;
6518 }
6519 
6520 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
6521 {
6522     DisasContext *ctx = container_of(dcbase, DisasContext, base);
6523     CPUPPCState *env = cpu_env(cs);
6524     uint32_t hflags = ctx->base.tb->flags;
6525 
6526     ctx->spr_cb = env->spr_cb;
6527     ctx->pr = (hflags >> HFLAGS_PR) & 1;
6528     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
6529     ctx->dr = (hflags >> HFLAGS_DR) & 1;
6530     ctx->hv = (hflags >> HFLAGS_HV) & 1;
6531     ctx->insns_flags = env->insns_flags;
6532     ctx->insns_flags2 = env->insns_flags2;
6533     ctx->access_type = -1;
6534     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
6535     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
6536     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
6537     ctx->flags = env->flags;
6538 #if defined(TARGET_PPC64)
6539     ctx->excp_model = env->excp_model;
6540     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
6541     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
6542     ctx->has_bhrb = !!(env->flags & POWERPC_FLAG_BHRB);
6543 #endif
6544     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
6545         || env->mmu_model & POWERPC_MMU_64;
6546 
6547     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
6548     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
6549     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
6550     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
6551     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
6552     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
6553     ctx->hr = (hflags >> HFLAGS_HR) & 1;
6554     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
6555     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
6556     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
6557     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
6558     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
6559     ctx->bhrb_enable = (hflags >> HFLAGS_BHRB_ENABLE) & 1;
6560 
6561     ctx->singlestep_enabled = 0;
6562     if ((hflags >> HFLAGS_SE) & 1) {
6563         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
6564         ctx->base.max_insns = 1;
6565     }
6566     if ((hflags >> HFLAGS_BE) & 1) {
6567         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
6568     }
6569 }
6570 
6571 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
6572 {
6573 }
6574 
6575 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
6576 {
6577     tcg_gen_insn_start(dcbase->pc_next);
6578 }
6579 
6580 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
6581 {
6582     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
6583     return opc1(insn) == 1;
6584 }
6585 
6586 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
6587 {
6588     DisasContext *ctx = container_of(dcbase, DisasContext, base);
6589     PowerPCCPU *cpu = POWERPC_CPU(cs);
6590     CPUPPCState *env = cpu_env(cs);
6591     target_ulong pc;
6592     uint32_t insn;
6593     bool ok;
6594 
6595     LOG_DISAS("----------------\n");
6596     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
6597               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
6598 
6599     ctx->cia = pc = ctx->base.pc_next;
6600     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
6601     ctx->base.pc_next = pc += 4;
6602 
6603     if (!is_prefix_insn(ctx, insn)) {
6604         ctx->opcode = insn;
6605         ok = (decode_insn32(ctx, insn) ||
6606               decode_legacy(cpu, ctx, insn));
6607     } else if ((pc & 63) == 0) {
6608         /*
6609          * Power v3.1, section 1.9 Exceptions:
6610          * attempt to execute a prefixed instruction that crosses a
6611          * 64-byte address boundary (system alignment error).
6612          */
6613         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
6614         ok = true;
6615     } else {
6616         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
6617                                              need_byteswap(ctx));
6618         ctx->base.pc_next = pc += 4;
6619         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
6620     }
6621     if (!ok) {
6622         gen_invalid(ctx);
6623     }
6624 
6625     /* End the TB when crossing a page boundary. */
6626     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
6627         ctx->base.is_jmp = DISAS_TOO_MANY;
6628     }
6629 }
6630 
6631 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
6632 {
6633     DisasContext *ctx = container_of(dcbase, DisasContext, base);
6634     DisasJumpType is_jmp = ctx->base.is_jmp;
6635     target_ulong nip = ctx->base.pc_next;
6636 
6637     if (is_jmp == DISAS_NORETURN) {
6638         /* We have already exited the TB. */
6639         return;
6640     }
6641 
6642     /* Honor single stepping. */
6643     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)) {
6644         bool rfi_type = false;
6645 
6646         switch (is_jmp) {
6647         case DISAS_TOO_MANY:
6648         case DISAS_EXIT_UPDATE:
6649         case DISAS_CHAIN_UPDATE:
6650             gen_update_nip(ctx, nip);
6651             break;
6652         case DISAS_EXIT:
6653         case DISAS_CHAIN:
6654             /*
6655              * This is a heuristic, to put it kindly. The rfi class of
6656              * instructions are among the few outside branches that change
6657              * NIP without taking an interrupt. Single step trace interrupts
6658              * do not fire on completion of these instructions.
6659              */
6660             rfi_type = true;
6661             break;
6662         default:
6663             g_assert_not_reached();
6664         }
6665 
6666         gen_debug_exception(ctx, rfi_type);
6667         return;
6668     }
6669 
6670     switch (is_jmp) {
6671     case DISAS_TOO_MANY:
6672         if (use_goto_tb(ctx, nip)) {
6673             pmu_count_insns(ctx);
6674             tcg_gen_goto_tb(0);
6675             gen_update_nip(ctx, nip);
6676             tcg_gen_exit_tb(ctx->base.tb, 0);
6677             break;
6678         }
6679         /* fall through */
6680     case DISAS_CHAIN_UPDATE:
6681         gen_update_nip(ctx, nip);
6682         /* fall through */
6683     case DISAS_CHAIN:
6684         /*
6685          * tcg_gen_lookup_and_goto_ptr will exit the TB if
6686          * CF_NO_GOTO_PTR is set. Count insns now.
6687          */
6688         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
6689             pmu_count_insns(ctx);
6690         }
6691 
6692         tcg_gen_lookup_and_goto_ptr();
6693         break;
6694 
6695     case DISAS_EXIT_UPDATE:
6696         gen_update_nip(ctx, nip);
6697         /* fall through */
6698     case DISAS_EXIT:
6699         pmu_count_insns(ctx);
6700         tcg_gen_exit_tb(NULL, 0);
6701         break;
6702 
6703     default:
6704         g_assert_not_reached();
6705     }
6706 }
6707 
6708 static const TranslatorOps ppc_tr_ops = {
6709     .init_disas_context = ppc_tr_init_disas_context,
6710     .tb_start           = ppc_tr_tb_start,
6711     .insn_start         = ppc_tr_insn_start,
6712     .translate_insn     = ppc_tr_translate_insn,
6713     .tb_stop            = ppc_tr_tb_stop,
6714 };
6715 
6716 void ppc_translate_code(CPUState *cs, TranslationBlock *tb,
6717                         int *max_insns, vaddr pc, void *host_pc)
6718 {
6719     DisasContext ctx;
6720 
6721     translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
6722 }
6723