/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "qemu/host-utils.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "exec/translator.h"
#include "exec/translation-block.h"
#include "exec/log.h"
#include "qemu/atomic128.h"
#include "spr_common.h"
#include "power8-pmu.h"

#include "qemu/qemu-print.h"
#include "qapi/error.h"

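/*
 * Including "exec/helper-info.c.inc" with HELPER_H defined expands the
 * DEF_HELPER_* declarations from helper.h into the call descriptors that
 * TCG needs in order to emit calls to those helpers from the translator
 * code below.
 */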
#define HELPER_H "helper.h"
#include "exec/helper-info.c.inc"
#undef  HELPER_H

#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2

/* Include definitions for instruction classes and implementation flags */
/* #define PPC_DEBUG_DISAS */

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers                                                  */

/* global register indexes */
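/*
 * Backing store for the register names handed to tcg_global_mem_new*()
 * in ppc_translate_init() below.  Sizing, including each name's trailing
 * NUL:
 *   GPRs:      "r0".."r9"   -> 10 * 3 bytes, "r10".."r31"   -> 22 * 4 bytes
 *   SPE GPRh:  "r0H".."r9H" -> 10 * 4 bytes, "r10H".."r31H" -> 22 * 5 bytes
 *   CR fields: "crf0".."crf7" -> 8 * 5 bytes
 * ppc_translate_init() carves the individual names out of this one array.
 */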
static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5           /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_length;
static TCGv cpu_reserve_val;
#if defined(TARGET_PPC64)
static TCGv cpu_reserve_val2;
#endif
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(tcg_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(tcg_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(tcg_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(tcg_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(tcg_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(tcg_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(tcg_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(tcg_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(tcg_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(tcg_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(tcg_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(tcg_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(tcg_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(tcg_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(tcg_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_length = tcg_global_mem_new(tcg_env,
                                            offsetof(CPUPPCState,
                                                     reserve_length),
                                            "reserve_length");
    cpu_reserve_val = tcg_global_mem_new(tcg_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");
#if defined(TARGET_PPC64)
    cpu_reserve_val2 = tcg_global_mem_new(tcg_env,
                                          offsetof(CPUPPCState, reserve_val2),
                                          "reserve_val2");
#endif

    cpu_fpscr = tcg_global_mem_new(tcg_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(tcg_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;  /* current instruction address */
    uint32_t opcode;
    /* MSR mode bits that affect how memory is accessed */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    powerpc_excp_t excp_model;
    bool sf_mode;
    bool has_cfar;
    bool has_bhrb;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    bool hr;
    bool mmcr0_pmcc0;
    bool mmcr0_pmcc1;
    bool mmcr0_pmcjce;
    bool pmc_other;
    bool pmu_insn_cnt;
    bool bhrb_enable;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

#define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
#define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
#define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
#define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if TARGET_BIG_ENDIAN
     return ctx->le_mode;
#else
     return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long.  */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};

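/*
 * Some SPR updates must appear atomic to the other threads of the core.
 * If this TB was generated for parallel execution (CF_PARALLEL), the
 * simplest way to guarantee that is to bail out to the exclusive
 * (single-threaded) execution path: gen_helper_exit_atomic() aborts the
 * TB and the instruction is re-executed with all other vCPUs stopped.
 */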
static inline bool gen_serialize(DisasContext *ctx)
{
    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        /* Restart with exclusive lock.  */
        gen_helper_exit_atomic(tcg_env);
        ctx->base.is_jmp = DISAS_NORETURN;
        return false;
    }
    return true;
}

#if !defined(CONFIG_USER_ONLY)
#if defined(TARGET_PPC64)
static inline bool gen_serialize_core(DisasContext *ctx)
{
    if (ctx->flags & POWERPC_FLAG_SMT) {
        return gen_serialize(ctx);
    }
    return true;
}
#endif

static inline bool gen_serialize_core_lpar(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
        return gen_serialize(ctx);
    }
#endif
    return true;
}
#endif

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err_nip(DisasContext *ctx, uint32_t excp,
                                  uint32_t error, target_ulong nip)
{
    TCGv_i32 t0, t1;

    gen_update_nip(ctx, nip);
    t0 = tcg_constant_i32(excp);
    t1 = tcg_constant_i32(error);
    gen_helper_raise_exception_err(tcg_env, t0, t1);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void gen_exception_err(DisasContext *ctx, uint32_t excp,
                                     uint32_t error)
{
    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction.
     */
    gen_exception_err_nip(ctx, excp, error, ctx->cia);
}

static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_constant_i32(excp);
    gen_helper_raise_exception(tcg_env, t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void gen_exception(DisasContext *ctx, uint32_t excp)
{
    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction.
     */
    gen_exception_nip(ctx, excp, ctx->cia);
}

#if !defined(CONFIG_USER_ONLY)
static void gen_ppc_maybe_interrupt(DisasContext *ctx)
{
    translator_io_start(&ctx->base);
    gen_helper_ppc_maybe_interrupt(tcg_env);
}
#endif

/*
 * Raise the appropriate debug exception and prepare the SPR registers
 * for it.
 *
 * The exception is either POWERPC_EXCP_TRACE (on most PowerPCs) or
 * POWERPC_EXCP_DEBUG (on BookE).
 */
static void gen_debug_exception(DisasContext *ctx, bool rfi_type)
{
#if !defined(CONFIG_USER_ONLY)
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
            dbsr = DBCR0_ICMP;
        } else {
            /* Must have been a branch */
            dbsr = DBCR0_BRT;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        gen_helper_raise_exception(tcg_env,
                                   tcg_constant_i32(POWERPC_EXCP_DEBUG));
        ctx->base.is_jmp = DISAS_NORETURN;
    } else {
        if (!rfi_type) { /* BookS does not single-step rfi-type instructions */
            TCGv t0 = tcg_temp_new();
            tcg_gen_movi_tl(t0, ctx->cia);
            gen_helper_book3s_trace(tcg_env, t0);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
    }
#endif
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/*****************************************************************************/
/* SPR READ/WRITE CALLBACKS */

void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}

/* #define PPC_DUMP_SPR_ACCESSES */

/*
 * Generic callbacks:
 * do nothing but store/retrieve the SPR value
 */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_constant_i32(sprn);
    gen_helper_load_dump_spr(tcg_env, t0);
#endif
}

void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_constant_i32(sprn);
    gen_helper_store_dump_spr(tcg_env, t0);
#endif
}

void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}

void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

void spr_core_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    if (!(ctx->flags & POWERPC_FLAG_SMT)) {
        spr_write_generic(ctx, sprn, gprn);
        return;
    }

    if (!gen_serialize(ctx)) {
        return;
    }

    gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn),
                                      cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}

void spr_core_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0;

    if (!(ctx->flags & POWERPC_FLAG_SMT)) {
        spr_write_generic32(ctx, sprn, gprn);
        return;
    }

    if (!gen_serialize(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn), t0);
    spr_store_dump_spr(sprn);
}

void spr_core_lpar_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
        spr_core_write_generic(ctx, sprn, gprn);
    } else {
        spr_write_generic(ctx, sprn, gprn);
    }
}

static void spr_write_CTRL_ST(DisasContext *ctx, int sprn, int gprn)
{
    /* This does not implement >1 thread */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_extract_tl(t0, cpu_gpr[gprn], 0, 1); /* Extract RUN field */
    tcg_gen_shli_tl(t1, t0, 8); /* Duplicate the bit in TS */
    tcg_gen_or_tl(t1, t1, t0);
    gen_store_spr(sprn, t1);
}

void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
{
    if (!(ctx->flags & POWERPC_FLAG_SMT_1LPAR)) {
        /* CTRL behaves as 1-thread in LPAR-per-thread mode */
        spr_write_CTRL_ST(ctx, sprn, gprn);
        goto out;
    }

    if (!gen_serialize(ctx)) {
        return;
    }

    gen_helper_spr_write_CTRL(tcg_env, tcg_constant_i32(sprn),
                              cpu_gpr[gprn]);
out:
    spr_store_dump_spr(sprn);

    /*
     * SPR_CTRL writes must force a new translation block,
     * allowing the PMU to calculate the run latch events with
     * more accuracy.
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
}

void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}

#endif

/* SPR common to all PowerPC */
/* XER */
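/*
 * XER is not stored as a single value: the SO, OV, CA, OV32 and CA32 bits
 * live in dedicated fields (and TCG globals, see above) so that arithmetic
 * flag updates stay cheap.  Reading XER therefore reassembles the
 * architected value and writing it scatters the bits back out.
 */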
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
}

void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags unconditionally; the isa300 check happens on read back */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* LR */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* Debug facilities */
/* CFAR */
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}

/* Breakpoint */
void spr_write_ciabr(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_ciabr(tcg_env, cpu_gpr[gprn]);
}

/* Watchpoint */
void spr_write_dawr0(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_dawr0(tcg_env, cpu_gpr[gprn]);
}

void spr_write_dawrx0(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_dawrx0(tcg_env, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}

/* User read access to SPR */
/* USPRx */
/* UMMCRx */
/* UPMCx */
/* USIA */
/* UDECR */
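/*
 * The user-level aliases listed above are assigned SPR numbers 16 below
 * their privileged counterparts (e.g. UMMCR0 = MMCR0 - 16), so the
 * accessors simply redirect to sprn + 0x10.
 */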
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC */
/* DECR */
#if !defined(CONFIG_USER_ONLY)
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_decr(cpu_gpr[gprn], tcg_env);
}

void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_decr(tcg_env, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC, except 601 */
/* Time base */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_tbl(cpu_gpr[gprn], tcg_env);
}

void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_tbu(cpu_gpr[gprn], tcg_env);
}

void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], tcg_env);
}

void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], tcg_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }

    translator_io_start(&ctx->base);
    gen_helper_store_tbl(tcg_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }

    translator_io_start(&ctx->base);
    gen_helper_store_tbu(tcg_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(tcg_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(tcg_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_purr(cpu_gpr[gprn], tcg_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }
    translator_io_start(&ctx->base);
    gen_helper_store_purr(tcg_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_hdecr(cpu_gpr[gprn], tcg_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }
    translator_io_start(&ctx->base);
    gen_helper_store_hdecr(tcg_env, cpu_gpr[gprn]);
}

void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_vtb(cpu_gpr[gprn], tcg_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }
    translator_io_start(&ctx->base);
    gen_helper_store_vtb(tcg_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }
    translator_io_start(&ctx->base);
    gen_helper_store_tbu40(tcg_env, cpu_gpr[gprn]);
}

#endif
#endif

#if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT7U */
/* IBAT0L...IBAT7L */
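/*
 * BAT SPR numbers interleave upper/lower halves (IBAT0U, IBAT0L, IBAT1U,
 * ...), so (sprn & 1) picks the upper or lower word and
 * (sprn - SPR_IBAT0U) / 2 picks the BAT entry.  The "_h" variants cover
 * BATs 4-7, which live in a separate SPR number range.
 */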
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
}

/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
}

void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
}

void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
}

void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
}

/* SDR1 */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(tcg_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* 64-bit PowerPC specific SPRs */
/* PIDR */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(tcg_env, cpu_gpr[gprn]);
}

void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(tcg_env, cpu_gpr[gprn]);
}

void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core(ctx)) {
        return;
    }

    gen_helper_store_ptcr(tcg_env, cpu_gpr[gprn]);
}

void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(tcg_env, cpu_gpr[gprn]);
}

/* DPDES */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }

    gen_helper_load_dpdes(cpu_gpr[gprn], tcg_env);
}

void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }

    gen_helper_store_dpdes(tcg_env, cpu_gpr[gprn]);
}
#endif
#endif

/* PowerPC 40x specific registers */
#if !defined(CONFIG_USER_ONLY)
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_40x_pit(cpu_gpr[gprn], tcg_env);
}

void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_40x_pit(tcg_env, cpu_gpr[gprn]);
}

void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(tcg_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_40x_sler(tcg_env, cpu_gpr[gprn]);
}

void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_40x_tcr(tcg_env, cpu_gpr[gprn]);
}

void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_40x_tsr(tcg_env, cpu_gpr[gprn]);
}

void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
    gen_helper_store_40x_pid(tcg_env, t0);
}

void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_booke_tcr(tcg_env, cpu_gpr[gprn]);
}

void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_booke_tsr(tcg_env, cpu_gpr[gprn]);
}
#endif

/* PIR */
#if !defined(CONFIG_USER_ONLY)
void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
}
#endif

/* SPE specific registers */
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
}

void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
}

#if !defined(CONFIG_USER_ONLY)
/* Callback used to write the exception vector base */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
}

void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
{
    int sprn_offs;

    if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
        sprn_offs = sprn - SPR_BOOKE_IVOR0;
    } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
        sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
    } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
        sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
    } else {
        qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
                      " vector 0x%03x\n", sprn);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivor_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
    gen_store_spr(sprn, t0);
}
#endif

#ifdef TARGET_PPC64
#ifndef CONFIG_USER_ONLY
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);
}

void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);
}

void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);
}
#endif
#endif

#ifndef CONFIG_USER_ONLY
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(tcg_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
#endif /* !CONFIG_USER_ONLY */

#if !defined(CONFIG_USER_ONLY)
void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
    gen_store_spr(sprn, t0);
}

void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
    gen_store_spr(sprn, t0);
}

void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn],
                    ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
    gen_store_spr(sprn, t0);
}

void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(tcg_env, cpu_gpr[gprn]);
}

void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_constant_i32(sprn);
    gen_helper_booke_setpid(tcg_env, t0, cpu_gpr[gprn]);
}

void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(tcg_env, cpu_gpr[gprn]);
}

void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(tcg_env, cpu_gpr[gprn]);
}

#endif

#if !defined(CONFIG_USER_ONLY)
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
}

void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
{
    TCGv mas7 = tcg_temp_new();
    TCGv mas3 = tcg_temp_new();
    gen_load_spr(mas7, SPR_BOOKE_MAS7);
    tcg_gen_shli_tl(mas7, mas7, 32);
    gen_load_spr(mas3, SPR_BOOKE_MAS3);
    tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
}

#endif

#ifdef TARGET_PPC64
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_constant_i32(bit);
    TCGv_i32 t2 = tcg_constant_i32(sprn);
    TCGv_i32 t3 = tcg_constant_i32(cause);

    gen_helper_fscr_facility_check(tcg_env, t1, t2, t3);
}

static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_constant_i32(bit);
    TCGv_i32 t2 = tcg_constant_i32(sprn);
    TCGv_i32 t3 = tcg_constant_i32(cause);

    gen_helper_msr_facility_check(tcg_env, t1, t2, t3);
}

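/*
 * Some 64-bit SPRs have a companion SPR at the next SPR number that
 * provides 32-bit access to their upper half (e.g. TEXASR/TEXASRU).
 * These helpers implement such companions by acting on bits 32..63 of
 * the register at sprn - 1.
 */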
void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
{
    TCGv spr_up = tcg_temp_new();
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_shri_tl(spr_up, spr, 32);
    tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
}

void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
    gen_store_spr(sprn - 1, spr);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
}

void spr_read_tfmr(DisasContext *ctx, int gprn, int sprn)
{
    /* Reading TFMR can cause it to be updated, so serialize threads here too */
    if (!gen_serialize_core(ctx)) {
        return;
    }
    gen_helper_load_tfmr(cpu_gpr[gprn], tcg_env);
}

void spr_write_tfmr(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core(ctx)) {
        return;
    }
    gen_helper_store_tfmr(tcg_env, cpu_gpr[gprn]);
}

void spr_write_sprc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sprc(tcg_env, cpu_gpr[gprn]);
}

void spr_read_sprd(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_sprd(cpu_gpr[gprn], tcg_env);
}

void spr_write_sprd(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core(ctx)) {
        return;
    }
    gen_helper_store_sprd(tcg_env, cpu_gpr[gprn]);
}

void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    translator_io_start(&ctx->base);
    gen_helper_store_lpcr(tcg_env, cpu_gpr[gprn]);
}

void spr_read_pmsr(DisasContext *ctx, int gprn, int sprn)
{
    translator_io_start(&ctx->base);
    gen_helper_load_pmsr(cpu_gpr[gprn], tcg_env);
}

void spr_write_pmcr(DisasContext *ctx, int sprn, int gprn)
{
    if (!gen_serialize_core_lpar(ctx)) {
        return;
    }
    translator_io_start(&ctx->base);
    gen_helper_store_pmcr(tcg_env, cpu_gpr[gprn]);
}

#endif /* !defined(CONFIG_USER_ONLY) */

void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
{
    TCGv t0 = tcg_temp_new();

    /*
     * Problem-state access to the (H)DEXCR is done using separate SPR
     * indexes, which are 16 below the SPR indexes that give full access
     * to the (H)DEXCR in privileged state. Problem state can only read
     * bits 32:63; bits 0:31 return 0.
     *
     * See sections 9.3.1-9.3.2 of PowerISA v3.1B.
     */

    gen_load_spr(t0, sprn + 16);
    tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
}

/* The PPR32 SPR accesses the upper 32-bits of PPR */
void spr_read_ppr32(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], SPR_PPR);
    tcg_gen_shri_tl(cpu_gpr[gprn], cpu_gpr[gprn], 32);
    spr_load_dump_spr(SPR_PPR);
}

void spr_write_ppr32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    /*
     * Don't clobber the low 32-bits of the PPR. These are all reserved bits
     * but TCG does implement them, so it would be surprising to zero them
     * here. "Priority nops" are similarly careful not to clobber reserved
     * bits.
     */
    gen_load_spr(t0, SPR_PPR);
    tcg_gen_deposit_tl(t0, t0, cpu_gpr[gprn], 32, 32);
    gen_store_spr(SPR_PPR, t0);
    spr_store_dump_spr(SPR_PPR);
}
#endif

#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

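/*
 * One entry of the legacy opcode table.  opc1 is the primary (major)
 * opcode; opc2/opc3/opc4 index the nested sub-tables used to decode
 * extended opcodes.  The GEN_OPCODE* macros below fill these fields in,
 * using 0xff when an opcode has no opc4-level decode (compare GEN_OPCODE
 * with GEN_OPCODE3).
 */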
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

static void gen_priv_opc(DisasContext *ctx)
{
    gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
}

/* Helpers for priv. check */
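/*
 * CHK_SV: privileged instruction, trap in problem state (PR=1).
 * CHK_HV: hypervisor-only instruction, trap unless HV=1 and PR=0.
 * CHK_HVRM: like CHK_HV but additionally requires real mode for data
 *           accesses (DR=0), as used by the caching-inhibited loads/stores.
 * In user-only mode all of them simply expand to GEN_PRIV.
 */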
#define GEN_PRIV(CTX)              \
    do {                           \
        gen_priv_opc(CTX); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV(CTX) GEN_PRIV(CTX)
#define CHK_SV(CTX) GEN_PRIV(CTX)
#define CHK_HVRM(CTX) GEN_PRIV(CTX)
#else
#define CHK_HV(CTX)                         \
    do {                                    \
        if (unlikely(ctx->pr || !ctx->hv)) {\
            GEN_PRIV(CTX);                  \
        }                                   \
    } while (0)
#define CHK_SV(CTX)              \
    do {                         \
        if (unlikely(ctx->pr)) { \
            GEN_PRIV(CTX);       \
        }                        \
    } while (0)
#define CHK_HVRM(CTX)                                   \
    do {                                                \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
            GEN_PRIV(CTX);                              \
        }                                               \
    } while (0)
#endif

#define CHK_NONE(CTX)

/*****************************************************************************/
/* PowerPC instructions table                                                */

#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};

/***                           Integer comparison                          ***/

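/*
 * Compute CR[crf] for a signed (s != 0) or unsigned comparison of arg0
 * with arg1: the first movcond selects LT vs. EQ, the second overrides
 * the result with GT when arg0 > arg1, and XER[SO] is then OR-ed in,
 * giving the architected LT/GT/EQ/SO layout of the CR field.
 */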
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1,
                       tcg_constant_tl(CRF_LT), tcg_constant_tl(CRF_EQ));
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, tcg_constant_tl(CRF_GT), t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_constant_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_constant_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/***                           Integer arithmetic                          ***/

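/*
 * Signed overflow for add/sub, with arg0 = result and arg1/arg2 the
 * operands: for addition OV is the sign bit of
 * (arg0 ^ arg2) & ~(arg1 ^ arg2) (same-signed operands, differently
 * signed result); for subtraction it is the sign bit of
 * (arg0 ^ arg2) & (arg1 ^ arg2).  The sign bit is bit 31 in narrow
 * (32-bit) mode and bit 63 otherwise; on ISA v3.00 OV32 is always
 * derived from bit 31, and SO accumulates OV.
 */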
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

1685 static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1686                                              TCGv res, TCGv arg0, TCGv arg1,
1687                                              TCGv ca32, int sub)
1688 {
1689     TCGv t0;
1690 
1691     if (!is_isa300(ctx)) {
1692         return;
1693     }
1694 
1695     t0 = tcg_temp_new();
1696     if (sub) {
1697         tcg_gen_eqv_tl(t0, arg0, arg1);
1698     } else {
1699         tcg_gen_xor_tl(t0, arg0, arg1);
1700     }
1701     tcg_gen_xor_tl(t0, t0, res);
1702     tcg_gen_extract_tl(ca32, t0, 32, 1);
1703 }
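
/*
 * The identity behind the computation above, for reference: for an
 * addition res = a + b, the per-bit carries satisfy
 *
 *     carries = a ^ b ^ res
 *
 * so bit 32 of (a ^ b ^ res) is the carry that propagated into the upper
 * word, which is exactly CA32.  For subtraction, computed as a + ~b + 1,
 * the same holds with ~b in place of b, hence the eqv (xnor) in the sub
 * path.
 */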
1704 
1705 /* Common add function */
1706 static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1707                                     TCGv arg2, TCGv ca, TCGv ca32,
1708                                     bool add_ca, bool compute_ca,
1709                                     bool compute_ov, bool compute_rc0)
1710 {
1711     TCGv t0 = ret;
1712 
1713     if (compute_ca || compute_ov) {
1714         t0 = tcg_temp_new();
1715     }
1716 
1717     if (compute_ca) {
1718         if (NARROW_MODE(ctx)) {
1719             /*
1720              * Caution: a non-obvious corner case of the spec is that
1721              * we must produce the *entire* 64-bit addition while
1722              * computing the carry into bit 32 (the carry out of the low word).
1723              */
1724             TCGv t1 = tcg_temp_new();
1725             tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1726             tcg_gen_add_tl(t0, arg1, arg2);
1727             if (add_ca) {
1728                 tcg_gen_add_tl(t0, t0, ca);
1729             }
1730             tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1731             tcg_gen_extract_tl(ca, ca, 32, 1);
1732             if (is_isa300(ctx)) {
1733                 tcg_gen_mov_tl(ca32, ca);
1734             }
1735         } else {
1736             TCGv zero = tcg_constant_tl(0);
1737             if (add_ca) {
1738                 tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1739                 tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1740             } else {
1741                 tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1742             }
1743             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1744         }
1745     } else {
1746         tcg_gen_add_tl(t0, arg1, arg2);
1747         if (add_ca) {
1748             tcg_gen_add_tl(t0, t0, ca);
1749         }
1750     }
1751 
1752     if (compute_ov) {
1753         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1754     }
1755     if (unlikely(compute_rc0)) {
1756         gen_set_Rc0(ctx, t0);
1757     }
1758 
1759     if (t0 != ret) {
1760         tcg_gen_mov_tl(ret, t0);
1761     }
1762 }
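
/*
 * A minimal usage sketch (hypothetical, just to show how the flags map):
 * an emitter for an addco.-style form, i.e. no carry-in but CA, OV and
 * CR0 all updated, would be
 *
 *     gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],
 *                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
 *                      cpu_ca, cpu_ca32,
 *                      false, true, true, true);
 *
 * while a plain add is simply all four booleans false.
 */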
1763 
1764 static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret,
1765                                      TCGv arg1, TCGv arg2, bool sign,
1766                                      bool compute_ov, bool compute_rc0)
1767 {
1768     TCGv_i32 t0 = tcg_temp_new_i32();
1769     TCGv_i32 t1 = tcg_temp_new_i32();
1770     TCGv_i32 t2 = tcg_temp_new_i32();
1771     TCGv_i32 t3 = tcg_temp_new_i32();
1772 
1773     tcg_gen_trunc_tl_i32(t0, arg1);
1774     tcg_gen_trunc_tl_i32(t1, arg2);
1775     if (sign) {
1776         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1777         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1778         tcg_gen_and_i32(t2, t2, t3);
1779         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1780         tcg_gen_or_i32(t2, t2, t3);
1781         tcg_gen_movi_i32(t3, 0);
1782         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1783         tcg_gen_div_i32(t3, t0, t1);
1784         tcg_gen_extu_i32_tl(ret, t3);
1785     } else {
1786         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1787         tcg_gen_movi_i32(t3, 0);
1788         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1789         tcg_gen_divu_i32(t3, t0, t1);
1790         tcg_gen_extu_i32_tl(ret, t3);
1791     }
1792     if (compute_ov) {
1793         tcg_gen_extu_i32_tl(cpu_ov, t2);
1794         if (is_isa300(ctx)) {
1795             tcg_gen_extu_i32_tl(cpu_ov32, t2);
1796         }
1797         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1798     }
1799 
1800     if (unlikely(compute_rc0)) {
1801         gen_set_Rc0(ctx, ret);
1802     }
1803 }
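
/*
 * In the code above, t2 ends up as 1 exactly when the division is
 * undefined by the architecture: a divisor of zero, or INT_MIN / -1 in
 * the signed case.  The divisor is then silently replaced by a non-zero
 * value so the host division cannot trap; the quotient is architecturally
 * undefined anyway, and t2 is what feeds OV/OV32 when compute_ov is set.
 * E.g. a signed 0x80000000 / 0xffffffff takes this path and sets OV.
 */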
1804 
1805 #if defined(TARGET_PPC64)
1806 static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret,
1807                                      TCGv arg1, TCGv arg2, bool sign,
1808                                      bool compute_ov, bool compute_rc0)
1809 {
1810     TCGv_i64 t0 = tcg_temp_new_i64();
1811     TCGv_i64 t1 = tcg_temp_new_i64();
1812     TCGv_i64 t2 = tcg_temp_new_i64();
1813     TCGv_i64 t3 = tcg_temp_new_i64();
1814 
1815     tcg_gen_mov_i64(t0, arg1);
1816     tcg_gen_mov_i64(t1, arg2);
1817     if (sign) {
1818         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1819         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1820         tcg_gen_and_i64(t2, t2, t3);
1821         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1822         tcg_gen_or_i64(t2, t2, t3);
1823         tcg_gen_movi_i64(t3, 0);
1824         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1825         tcg_gen_div_i64(ret, t0, t1);
1826     } else {
1827         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1828         tcg_gen_movi_i64(t3, 0);
1829         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1830         tcg_gen_divu_i64(ret, t0, t1);
1831     }
1832     if (compute_ov) {
1833         tcg_gen_mov_tl(cpu_ov, t2);
1834         if (is_isa300(ctx)) {
1835             tcg_gen_mov_tl(cpu_ov32, t2);
1836         }
1837         tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1838     }
1839 
1840     if (unlikely(compute_rc0)) {
1841         gen_set_Rc0(ctx, ret);
1842     }
1843 }
1844 #endif
1845 
1846 static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1847                                      TCGv arg2, int sign)
1848 {
1849     TCGv_i32 t0 = tcg_temp_new_i32();
1850     TCGv_i32 t1 = tcg_temp_new_i32();
1851 
1852     tcg_gen_trunc_tl_i32(t0, arg1);
1853     tcg_gen_trunc_tl_i32(t1, arg2);
1854     if (sign) {
1855         TCGv_i32 t2 = tcg_temp_new_i32();
1856         TCGv_i32 t3 = tcg_temp_new_i32();
1857         tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1858         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1859         tcg_gen_and_i32(t2, t2, t3);
1860         tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1861         tcg_gen_or_i32(t2, t2, t3);
1862         tcg_gen_movi_i32(t3, 0);
1863         tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1864         tcg_gen_rem_i32(t3, t0, t1);
1865         tcg_gen_ext_i32_tl(ret, t3);
1866     } else {
1867         TCGv_i32 t2 = tcg_constant_i32(1);
1868         TCGv_i32 t3 = tcg_constant_i32(0);
1869         tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1870         tcg_gen_remu_i32(t0, t0, t1);
1871         tcg_gen_extu_i32_tl(ret, t0);
1872     }
1873 }
1874 
1875 #if defined(TARGET_PPC64)
1876 static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1877                                      TCGv arg2, int sign)
1878 {
1879     TCGv_i64 t0 = tcg_temp_new_i64();
1880     TCGv_i64 t1 = tcg_temp_new_i64();
1881 
1882     tcg_gen_mov_i64(t0, arg1);
1883     tcg_gen_mov_i64(t1, arg2);
1884     if (sign) {
1885         TCGv_i64 t2 = tcg_temp_new_i64();
1886         TCGv_i64 t3 = tcg_temp_new_i64();
1887         tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1888         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1889         tcg_gen_and_i64(t2, t2, t3);
1890         tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1891         tcg_gen_or_i64(t2, t2, t3);
1892         tcg_gen_movi_i64(t3, 0);
1893         tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1894         tcg_gen_rem_i64(ret, t0, t1);
1895     } else {
1896         TCGv_i64 t2 = tcg_constant_i64(1);
1897         TCGv_i64 t3 = tcg_constant_i64(0);
1898         tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1899         tcg_gen_remu_i64(ret, t0, t1);
1900     }
1901 }
1902 #endif
1903 
1904 /* Common subf function */
1905 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1906                                      TCGv arg2, bool add_ca, bool compute_ca,
1907                                      bool compute_ov, bool compute_rc0)
1908 {
1909     TCGv t0 = ret;
1910 
1911     if (compute_ca || compute_ov) {
1912         t0 = tcg_temp_new();
1913     }
1914 
1915     if (compute_ca) {
1916         /* dest = ~arg1 + arg2 [+ ca].  */
1917         if (NARROW_MODE(ctx)) {
1918             /*
1919              * Caution: a non-obvious corner case of the spec is that
1920              * we must produce the *entire* 64-bit addition while
1921              * computing the carry into bit 32 (the carry out of the low word).
1922              */
1923             TCGv inv1 = tcg_temp_new();
1924             TCGv t1 = tcg_temp_new();
1925             tcg_gen_not_tl(inv1, arg1);
1926             if (add_ca) {
1927                 tcg_gen_add_tl(t0, arg2, cpu_ca);
1928             } else {
1929                 tcg_gen_addi_tl(t0, arg2, 1);
1930             }
1931             tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
1932             tcg_gen_add_tl(t0, t0, inv1);
1933             tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
1934             tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
1935             if (is_isa300(ctx)) {
1936                 tcg_gen_mov_tl(cpu_ca32, cpu_ca);
1937             }
1938         } else if (add_ca) {
1939             TCGv zero, inv1 = tcg_temp_new();
1940             tcg_gen_not_tl(inv1, arg1);
1941             zero = tcg_constant_tl(0);
1942             tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1943             tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1944             gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
1945         } else {
1946             tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1947             tcg_gen_sub_tl(t0, arg2, arg1);
1948             gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
1949         }
1950     } else if (add_ca) {
1951         /*
1952          * Since we're ignoring carry-out, we can simplify the
1953          * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
1954          */
1955         tcg_gen_sub_tl(t0, arg2, arg1);
1956         tcg_gen_add_tl(t0, t0, cpu_ca);
1957         tcg_gen_subi_tl(t0, t0, 1);
1958     } else {
1959         tcg_gen_sub_tl(t0, arg2, arg1);
1960     }
1961 
1962     if (compute_ov) {
1963         gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1964     }
1965     if (unlikely(compute_rc0)) {
1966         gen_set_Rc0(ctx, t0);
1967     }
1968 
1969     if (t0 != ret) {
1970         tcg_gen_mov_tl(ret, t0);
1971     }
1972 }
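
/*
 * The simplification used in the add_ca-without-carry branch follows from
 * two's complement: subfe is defined as ~RA + RB + CA, and since
 * ~RA = -RA - 1, this equals RB - RA + CA - 1, which is exactly the
 * sub/add/subi sequence emitted above when the carry-out is not needed.
 */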
1973 
1974 /***                            Integer logical                            ***/
1975 
1976 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
1977 static void gen_pause(DisasContext *ctx)
1978 {
1979     TCGv_i32 t0 = tcg_constant_i32(0);
1980     tcg_gen_st_i32(t0, tcg_env,
1981                    -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
1982 
1983     /* Stop translation; this gives other CPUs a chance to run */
1984     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
1985 }
1986 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
1987 
1988 /***                             Integer rotate                            ***/
1989 
1990 /* rlwimi & rlwimi. */
1991 static void gen_rlwimi(DisasContext *ctx)
1992 {
1993     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
1994     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
1995     uint32_t sh = SH(ctx->opcode);
1996     uint32_t mb = MB(ctx->opcode);
1997     uint32_t me = ME(ctx->opcode);
1998 
1999     if (sh == (31 - me) && mb <= me) {
2000         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2001     } else {
2002         target_ulong mask;
2003         bool mask_in_32b = true;
2004         TCGv t1;
2005 
2006 #if defined(TARGET_PPC64)
2007         mb += 32;
2008         me += 32;
2009 #endif
2010         mask = MASK(mb, me);
2011 
2012 #if defined(TARGET_PPC64)
2013         if (mask > 0xffffffffu) {
2014             mask_in_32b = false;
2015         }
2016 #endif
2017         t1 = tcg_temp_new();
2018         if (mask_in_32b) {
2019             TCGv_i32 t0 = tcg_temp_new_i32();
2020             tcg_gen_trunc_tl_i32(t0, t_rs);
2021             tcg_gen_rotli_i32(t0, t0, sh);
2022             tcg_gen_extu_i32_tl(t1, t0);
2023         } else {
2024 #if defined(TARGET_PPC64)
2025             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2026             tcg_gen_rotli_i64(t1, t1, sh);
2027 #else
2028             g_assert_not_reached();
2029 #endif
2030         }
2031 
2032         tcg_gen_andi_tl(t1, t1, mask);
2033         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2034         tcg_gen_or_tl(t_ra, t_ra, t1);
2035     }
2036     if (unlikely(Rc(ctx->opcode) != 0)) {
2037         gen_set_Rc0(ctx, t_ra);
2038     }
2039 }
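
/*
 * Example of the fast path, for reference: "rlwimi r3,r4,8,16,23" has
 * sh = 8, mb = 16, me = 23, so sh == 31 - me and mb <= me, and the whole
 * rotate-mask-insert collapses to
 *
 *     tcg_gen_deposit_tl(cpu_gpr[3], cpu_gpr[3], cpu_gpr[4], 8, 8);
 *
 * i.e. bits 7:0 of r4 are inserted into bits 15:8 of r3.
 */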
2040 
2041 /* rlwinm & rlwinm. */
2042 static void gen_rlwinm(DisasContext *ctx)
2043 {
2044     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2045     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2046     int sh = SH(ctx->opcode);
2047     int mb = MB(ctx->opcode);
2048     int me = ME(ctx->opcode);
2049     int len = me - mb + 1;
2050     int rsh = (32 - sh) & 31;
2051 
2052     if (sh != 0 && len > 0 && me == (31 - sh)) {
2053         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2054     } else if (me == 31 && rsh + len <= 32) {
2055         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2056     } else {
2057         target_ulong mask;
2058         bool mask_in_32b = true;
2059 #if defined(TARGET_PPC64)
2060         mb += 32;
2061         me += 32;
2062 #endif
2063         mask = MASK(mb, me);
2064 #if defined(TARGET_PPC64)
2065         if (mask > 0xffffffffu) {
2066             mask_in_32b = false;
2067         }
2068 #endif
2069         if (mask_in_32b) {
2070             if (sh == 0) {
2071                 tcg_gen_andi_tl(t_ra, t_rs, mask);
2072             } else {
2073                 TCGv_i32 t0 = tcg_temp_new_i32();
2074                 tcg_gen_trunc_tl_i32(t0, t_rs);
2075                 tcg_gen_rotli_i32(t0, t0, sh);
2076                 tcg_gen_andi_i32(t0, t0, mask);
2077                 tcg_gen_extu_i32_tl(t_ra, t0);
2078             }
2079         } else {
2080 #if defined(TARGET_PPC64)
2081             tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2082             tcg_gen_rotli_i64(t_ra, t_ra, sh);
2083             tcg_gen_andi_i64(t_ra, t_ra, mask);
2084 #else
2085             g_assert_not_reached();
2086 #endif
2087         }
2088     }
2089     if (unlikely(Rc(ctx->opcode) != 0)) {
2090         gen_set_Rc0(ctx, t_ra);
2091     }
2092 }
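
/*
 * Example: "srwi r3,r4,4" is the standard extended mnemonic for
 * "rlwinm r3,r4,28,4,31", giving sh = 28, mb = 4, me = 31, len = 28 and
 * rsh = 4.  That satisfies the second fast path, which emits
 *
 *     tcg_gen_extract_tl(cpu_gpr[3], cpu_gpr[4], 4, 28);
 *
 * i.e. a zero-extended 32-bit logical shift right by 4.
 */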
2093 
2094 /* rlwnm & rlwnm. */
2095 static void gen_rlwnm(DisasContext *ctx)
2096 {
2097     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2098     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2099     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2100     uint32_t mb = MB(ctx->opcode);
2101     uint32_t me = ME(ctx->opcode);
2102     target_ulong mask;
2103     bool mask_in_32b = true;
2104 
2105 #if defined(TARGET_PPC64)
2106     mb += 32;
2107     me += 32;
2108 #endif
2109     mask = MASK(mb, me);
2110 
2111 #if defined(TARGET_PPC64)
2112     if (mask > 0xffffffffu) {
2113         mask_in_32b = false;
2114     }
2115 #endif
2116     if (mask_in_32b) {
2117         TCGv_i32 t0 = tcg_temp_new_i32();
2118         TCGv_i32 t1 = tcg_temp_new_i32();
2119         tcg_gen_trunc_tl_i32(t0, t_rb);
2120         tcg_gen_trunc_tl_i32(t1, t_rs);
2121         tcg_gen_andi_i32(t0, t0, 0x1f);
2122         tcg_gen_rotl_i32(t1, t1, t0);
2123         tcg_gen_extu_i32_tl(t_ra, t1);
2124     } else {
2125 #if defined(TARGET_PPC64)
2126         TCGv_i64 t0 = tcg_temp_new_i64();
2127         tcg_gen_andi_i64(t0, t_rb, 0x1f);
2128         tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2129         tcg_gen_rotl_i64(t_ra, t_ra, t0);
2130 #else
2131         g_assert_not_reached();
2132 #endif
2133     }
2134 
2135     tcg_gen_andi_tl(t_ra, t_ra, mask);
2136 
2137     if (unlikely(Rc(ctx->opcode) != 0)) {
2138         gen_set_Rc0(ctx, t_ra);
2139     }
2140 }
2141 
2142 #if defined(TARGET_PPC64)
2143 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2144 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2145 {                                                                             \
2146     gen_##name(ctx, 0);                                                       \
2147 }                                                                             \
2148                                                                               \
2149 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2150 {                                                                             \
2151     gen_##name(ctx, 1);                                                       \
2152 }
2153 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2154 static void glue(gen_, name##0)(DisasContext *ctx)                            \
2155 {                                                                             \
2156     gen_##name(ctx, 0, 0);                                                    \
2157 }                                                                             \
2158                                                                               \
2159 static void glue(gen_, name##1)(DisasContext *ctx)                            \
2160 {                                                                             \
2161     gen_##name(ctx, 0, 1);                                                    \
2162 }                                                                             \
2163                                                                               \
2164 static void glue(gen_, name##2)(DisasContext *ctx)                            \
2165 {                                                                             \
2166     gen_##name(ctx, 1, 0);                                                    \
2167 }                                                                             \
2168                                                                               \
2169 static void glue(gen_, name##3)(DisasContext *ctx)                            \
2170 {                                                                             \
2171     gen_##name(ctx, 1, 1);                                                    \
2172 }
2173 
2174 static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2175 {
2176     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2177     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2178     int len = me - mb + 1;
2179     int rsh = (64 - sh) & 63;
2180 
2181     if (sh != 0 && len > 0 && me == (63 - sh)) {
2182         tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2183     } else if (me == 63 && rsh + len <= 64) {
2184         tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2185     } else {
2186         tcg_gen_rotli_tl(t_ra, t_rs, sh);
2187         tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2188     }
2189     if (unlikely(Rc(ctx->opcode) != 0)) {
2190         gen_set_Rc0(ctx, t_ra);
2191     }
2192 }
2193 
2194 /* rldicl - rldicl. */
2195 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2196 {
2197     uint32_t sh, mb;
2198 
2199     sh = SH(ctx->opcode) | (shn << 5);
2200     mb = MB(ctx->opcode) | (mbn << 5);
2201     gen_rldinm(ctx, mb, 63, sh);
2202 }
2203 GEN_PPC64_R4(rldicl, 0x1E, 0x00);
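
/*
 * Example: "srdi r3,r4,8" is the extended mnemonic for
 * "rldicl r3,r4,56,8", so gen_rldinm() sees sh = 56, mb = 8, me = 63,
 * hence rsh = 8 and len = 56, and the extract fast path emits
 *
 *     tcg_gen_extract_tl(cpu_gpr[3], cpu_gpr[4], 8, 56);
 *
 * which is a 64-bit logical shift right by 8.
 */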
2204 
2205 /* rldicr - rldicr. */
2206 static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2207 {
2208     uint32_t sh, me;
2209 
2210     sh = SH(ctx->opcode) | (shn << 5);
2211     me = MB(ctx->opcode) | (men << 5);
2212     gen_rldinm(ctx, 0, me, sh);
2213 }
2214 GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2215 
2216 /* rldic - rldic. */
2217 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2218 {
2219     uint32_t sh, mb;
2220 
2221     sh = SH(ctx->opcode) | (shn << 5);
2222     mb = MB(ctx->opcode) | (mbn << 5);
2223     gen_rldinm(ctx, mb, 63 - sh, sh);
2224 }
2225 GEN_PPC64_R4(rldic, 0x1E, 0x04);
2226 
2227 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2228 {
2229     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2230     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2231     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2232     TCGv t0;
2233 
2234     t0 = tcg_temp_new();
2235     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2236     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2237 
2238     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2239     if (unlikely(Rc(ctx->opcode) != 0)) {
2240         gen_set_Rc0(ctx, t_ra);
2241     }
2242 }
2243 
2244 /* rldcl - rldcl. */
2245 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2246 {
2247     uint32_t mb;
2248 
2249     mb = MB(ctx->opcode) | (mbn << 5);
2250     gen_rldnm(ctx, mb, 63);
2251 }
2252 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2253 
2254 /* rldcr - rldcr. */
2255 static inline void gen_rldcr(DisasContext *ctx, int men)
2256 {
2257     uint32_t me;
2258 
2259     me = MB(ctx->opcode) | (men << 5);
2260     gen_rldnm(ctx, 0, me);
2261 }
2262 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2263 
2264 /* rldimi - rldimi. */
2265 static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2266 {
2267     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2268     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2269     uint32_t sh = SH(ctx->opcode) | (shn << 5);
2270     uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2271     uint32_t me = 63 - sh;
2272 
2273     if (mb <= me) {
2274         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2275     } else {
2276         target_ulong mask = MASK(mb, me);
2277         TCGv t1 = tcg_temp_new();
2278 
2279         tcg_gen_rotli_tl(t1, t_rs, sh);
2280         tcg_gen_andi_tl(t1, t1, mask);
2281         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2282         tcg_gen_or_tl(t_ra, t_ra, t1);
2283     }
2284     if (unlikely(Rc(ctx->opcode) != 0)) {
2285         gen_set_Rc0(ctx, t_ra);
2286     }
2287 }
2288 GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2289 #endif
2290 
2291 /***                             Integer shift                             ***/
2292 
2293 /* slw & slw. */
2294 static void gen_slw(DisasContext *ctx)
2295 {
2296     TCGv t0, t1;
2297 
2298     t0 = tcg_temp_new();
2299     /* AND rS with a mask that is 0 when rB >= 0x20 */
2300 #if defined(TARGET_PPC64)
2301     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2302     tcg_gen_sari_tl(t0, t0, 0x3f);
2303 #else
2304     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2305     tcg_gen_sari_tl(t0, t0, 0x1f);
2306 #endif
2307     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2308     t1 = tcg_temp_new();
2309     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2310     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2311     tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2312     if (unlikely(Rc(ctx->opcode) != 0)) {
2313         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2314     }
2315 }
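
/*
 * The mask trick above, spelled out: shifting rB left so that its bit 5
 * lands in the sign bit and then arithmetic-shifting back yields all-ones
 * when the shift amount is >= 32 and zero otherwise, so the andc zeroes
 * rS exactly when the architected slw result must be 0.  The remaining
 * low 5 bits of rB can then be used as a native shift count.  The same
 * trick is used for srw, sld and srd below (with bit 6 for the 64-bit
 * forms).
 */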
2316 
2317 /* sraw & sraw. */
2318 static void gen_sraw(DisasContext *ctx)
2319 {
2320     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], tcg_env,
2321                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2322     if (unlikely(Rc(ctx->opcode) != 0)) {
2323         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2324     }
2325 }
2326 
2327 /* srawi & srawi. */
2328 static void gen_srawi(DisasContext *ctx)
2329 {
2330     int sh = SH(ctx->opcode);
2331     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2332     TCGv src = cpu_gpr[rS(ctx->opcode)];
2333     if (sh == 0) {
2334         tcg_gen_ext32s_tl(dst, src);
2335         tcg_gen_movi_tl(cpu_ca, 0);
2336         if (is_isa300(ctx)) {
2337             tcg_gen_movi_tl(cpu_ca32, 0);
2338         }
2339     } else {
2340         TCGv t0;
2341         tcg_gen_ext32s_tl(dst, src);
2342         tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2343         t0 = tcg_temp_new();
2344         tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2345         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2346         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2347         if (is_isa300(ctx)) {
2348             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2349         }
2350         tcg_gen_sari_tl(dst, dst, sh);
2351     }
2352     if (unlikely(Rc(ctx->opcode) != 0)) {
2353         gen_set_Rc0(ctx, dst);
2354     }
2355 }
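
/*
 * CA is set only when the source is negative *and* at least one 1-bit was
 * shifted out; this is precisely the case where the arithmetic shift
 * (which rounds toward minus infinity) differs from a signed division by
 * 2^sh that rounds toward zero.  E.g. srawi by 1 on -3 yields -2 with
 * CA = 1, and a following addze gives -1, matching -3 / 2.
 */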
2356 
2357 /* srw & srw. */
2358 static void gen_srw(DisasContext *ctx)
2359 {
2360     TCGv t0, t1;
2361 
2362     t0 = tcg_temp_new();
2363     /* AND rS with a mask that is 0 when rB >= 0x20 */
2364 #if defined(TARGET_PPC64)
2365     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2366     tcg_gen_sari_tl(t0, t0, 0x3f);
2367 #else
2368     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2369     tcg_gen_sari_tl(t0, t0, 0x1f);
2370 #endif
2371     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2372     tcg_gen_ext32u_tl(t0, t0);
2373     t1 = tcg_temp_new();
2374     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2375     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2376     if (unlikely(Rc(ctx->opcode) != 0)) {
2377         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2378     }
2379 }
2380 
2381 #if defined(TARGET_PPC64)
2382 /* sld & sld. */
2383 static void gen_sld(DisasContext *ctx)
2384 {
2385     TCGv t0, t1;
2386 
2387     t0 = tcg_temp_new();
2388     /* AND rS with a mask that is 0 when rB >= 0x40 */
2389     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2390     tcg_gen_sari_tl(t0, t0, 0x3f);
2391     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2392     t1 = tcg_temp_new();
2393     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2394     tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2395     if (unlikely(Rc(ctx->opcode) != 0)) {
2396         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2397     }
2398 }
2399 
2400 /* srad & srad. */
2401 static void gen_srad(DisasContext *ctx)
2402 {
2403     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], tcg_env,
2404                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2405     if (unlikely(Rc(ctx->opcode) != 0)) {
2406         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2407     }
2408 }
2409 /* sradi & sradi. */
2410 static inline void gen_sradi(DisasContext *ctx, int n)
2411 {
2412     int sh = SH(ctx->opcode) + (n << 5);
2413     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2414     TCGv src = cpu_gpr[rS(ctx->opcode)];
2415     if (sh == 0) {
2416         tcg_gen_mov_tl(dst, src);
2417         tcg_gen_movi_tl(cpu_ca, 0);
2418         if (is_isa300(ctx)) {
2419             tcg_gen_movi_tl(cpu_ca32, 0);
2420         }
2421     } else {
2422         TCGv t0;
2423         tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2424         t0 = tcg_temp_new();
2425         tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2426         tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2427         tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2428         if (is_isa300(ctx)) {
2429             tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2430         }
2431         tcg_gen_sari_tl(dst, src, sh);
2432     }
2433     if (unlikely(Rc(ctx->opcode) != 0)) {
2434         gen_set_Rc0(ctx, dst);
2435     }
2436 }
2437 
2438 static void gen_sradi0(DisasContext *ctx)
2439 {
2440     gen_sradi(ctx, 0);
2441 }
2442 
2443 static void gen_sradi1(DisasContext *ctx)
2444 {
2445     gen_sradi(ctx, 1);
2446 }
2447 
2448 /* extswsli & extswsli. */
2449 static inline void gen_extswsli(DisasContext *ctx, int n)
2450 {
2451     int sh = SH(ctx->opcode) + (n << 5);
2452     TCGv dst = cpu_gpr[rA(ctx->opcode)];
2453     TCGv src = cpu_gpr[rS(ctx->opcode)];
2454 
2455     tcg_gen_ext32s_tl(dst, src);
2456     tcg_gen_shli_tl(dst, dst, sh);
2457     if (unlikely(Rc(ctx->opcode) != 0)) {
2458         gen_set_Rc0(ctx, dst);
2459     }
2460 }
2461 
2462 static void gen_extswsli0(DisasContext *ctx)
2463 {
2464     gen_extswsli(ctx, 0);
2465 }
2466 
2467 static void gen_extswsli1(DisasContext *ctx)
2468 {
2469     gen_extswsli(ctx, 1);
2470 }
2471 
2472 /* srd & srd. */
2473 static void gen_srd(DisasContext *ctx)
2474 {
2475     TCGv t0, t1;
2476 
2477     t0 = tcg_temp_new();
2478     /* AND rS with a mask that is 0 when rB >= 0x40 */
2479     tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2480     tcg_gen_sari_tl(t0, t0, 0x3f);
2481     tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2482     t1 = tcg_temp_new();
2483     tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2484     tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2485     if (unlikely(Rc(ctx->opcode) != 0)) {
2486         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2487     }
2488 }
2489 #endif
2490 
2491 /***                           Addressing modes                            ***/
2492 /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2493 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2494                                       target_long maskl)
2495 {
2496     target_long simm = SIMM(ctx->opcode);
2497 
2498     simm &= ~maskl;
2499     if (rA(ctx->opcode) == 0) {
2500         if (NARROW_MODE(ctx)) {
2501             simm = (uint32_t)simm;
2502         }
2503         tcg_gen_movi_tl(EA, simm);
2504     } else if (likely(simm != 0)) {
2505         tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2506         if (NARROW_MODE(ctx)) {
2507             tcg_gen_ext32u_tl(EA, EA);
2508         }
2509     } else {
2510         if (NARROW_MODE(ctx)) {
2511             tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2512         } else {
2513             tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2514         }
2515     }
2516 }
2517 
2518 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2519 {
2520     if (rA(ctx->opcode) == 0) {
2521         if (NARROW_MODE(ctx)) {
2522             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2523         } else {
2524             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2525         }
2526     } else {
2527         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2528         if (NARROW_MODE(ctx)) {
2529             tcg_gen_ext32u_tl(EA, EA);
2530         }
2531     }
2532 }
2533 
2534 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2535 {
2536     if (rA(ctx->opcode) == 0) {
2537         tcg_gen_movi_tl(EA, 0);
2538     } else if (NARROW_MODE(ctx)) {
2539         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2540     } else {
2541         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2542     }
2543 }
2544 
2545 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2546                                 target_long val)
2547 {
2548     tcg_gen_addi_tl(ret, arg1, val);
2549     if (NARROW_MODE(ctx)) {
2550         tcg_gen_ext32u_tl(ret, ret);
2551     }
2552 }
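
/*
 * Common to all of the EA helpers above: a base of GPR[rA], or the
 * constant 0 when the rA field is 0, plus an immediate or register
 * offset, with the result truncated to 32 bits in narrow (32-bit) mode.
 * For example "lwz r3,8(r0)" loads from address 8, not from GPR0 + 8,
 * because rA = 0 selects the constant zero.
 */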
2553 
2554 static inline void gen_align_no_le(DisasContext *ctx)
2555 {
2556     gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
2557                       (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
2558 }
2559 
2560 /* EA <- {(ra == 0) ? 0 : GPR[ra]} + displ */
2561 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
2562 {
2563     TCGv ea = tcg_temp_new();
2564     if (ra) {
2565         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
2566     } else {
2567         tcg_gen_mov_tl(ea, displ);
2568     }
2569     if (NARROW_MODE(ctx)) {
2570         tcg_gen_ext32u_tl(ea, ea);
2571     }
2572     return ea;
2573 }
2574 
2575 #if defined(TARGET_PPC64)
2576 /* EA <- (ra == 0) ? 0 : GPR[ra] */
2577 static TCGv do_ea_calc_ra(DisasContext *ctx, int ra)
2578 {
2579     TCGv EA = tcg_temp_new();
2580     if (!ra) {
2581         tcg_gen_movi_tl(EA, 0);
2582     } else if (NARROW_MODE(ctx)) {
2583         tcg_gen_ext32u_tl(EA, cpu_gpr[ra]);
2584     } else {
2585         tcg_gen_mov_tl(EA, cpu_gpr[ra]);
2586     }
2587     return EA;
2588 }
2589 #endif
2590 
2591 /***                             Integer load                              ***/
2592 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
2593 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
2594 
2595 #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
2596 static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
2597                                   TCGv val,                             \
2598                                   TCGv addr)                            \
2599 {                                                                       \
2600     tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
2601 }
2602 
2603 GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
2604 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
2605 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
2606 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
2607 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
2608 
2609 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
2610 GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
2611 
2612 #define GEN_QEMU_LOAD_64(ldop, op)                                  \
2613 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
2614                                              TCGv_i64 val,          \
2615                                              TCGv addr)             \
2616 {                                                                   \
2617     tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
2618 }
2619 
2620 GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
2621 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
2622 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
2623 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
2624 GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
2625 
2626 #if defined(TARGET_PPC64)
2627 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
2628 #endif
2629 
2630 #define GEN_QEMU_STORE_TL(stop, op)                                     \
2631 static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
2632                                   TCGv val,                             \
2633                                   TCGv addr)                            \
2634 {                                                                       \
2635     tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
2636 }
2637 
2638 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
2639 GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
2640 #endif
2641 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
2642 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
2643 
2644 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
2645 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
2646 
2647 #define GEN_QEMU_STORE_64(stop, op)                               \
2648 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
2649                                               TCGv_i64 val,       \
2650                                               TCGv addr)          \
2651 {                                                                 \
2652     tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
2653 }
2654 
2655 GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
2656 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
2657 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
2658 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
2659 
2660 #if defined(TARGET_PPC64)
2661 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
2662 #endif
2663 
2664 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
2665 static void glue(gen_, name##x)(DisasContext *ctx)                            \
2666 {                                                                             \
2667     TCGv EA;                                                                  \
2668     chk(ctx);                                                                 \
2669     gen_set_access_type(ctx, ACCESS_INT);                                     \
2670     EA = tcg_temp_new();                                                      \
2671     gen_addr_reg_index(ctx, EA);                                              \
2672     gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
2673 }
2674 
2675 #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
2676     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2677 
2678 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
2679     GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2680 
2681 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
2682 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2683 {                                                                             \
2684     TCGv EA;                                                                  \
2685     CHK_SV(ctx);                                                              \
2686     gen_set_access_type(ctx, ACCESS_INT);                                     \
2687     EA = tcg_temp_new();                                                      \
2688     gen_addr_reg_index(ctx, EA);                                              \
2689     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
2690 }
2691 
2692 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
2693 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
2694 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
2695 #if defined(TARGET_PPC64)
2696 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
2697 #endif
2698 
2699 #if defined(TARGET_PPC64)
2700 /* CI load/store variants */
2701 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
2702 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
2703 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
2704 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
2705 #endif
2706 
2707 /***                              Integer store                            ***/
2708 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
2709 static void glue(gen_, name##x)(DisasContext *ctx)                            \
2710 {                                                                             \
2711     TCGv EA;                                                                  \
2712     chk(ctx);                                                                 \
2713     gen_set_access_type(ctx, ACCESS_INT);                                     \
2714     EA = tcg_temp_new();                                                      \
2715     gen_addr_reg_index(ctx, EA);                                              \
2716     gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
2717 }
2718 #define GEN_STX(name, stop, opc2, opc3, type)                                 \
2719     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2720 
2721 #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
2722     GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2723 
2724 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
2725 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2726 {                                                                             \
2727     TCGv EA;                                                                  \
2728     CHK_SV(ctx);                                                              \
2729     gen_set_access_type(ctx, ACCESS_INT);                                     \
2730     EA = tcg_temp_new();                                                      \
2731     gen_addr_reg_index(ctx, EA);                                              \
2732     tcg_gen_qemu_st_tl(                                                       \
2733         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
2734 }
2735 
2736 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
2737 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
2738 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
2739 #if defined(TARGET_PPC64)
2740 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
2741 #endif
2742 
2743 #if defined(TARGET_PPC64)
2744 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
2745 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
2746 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
2747 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
2748 #endif
2749 /***                Integer load and store with byte reverse               ***/
2750 
2751 /* lhbrx */
2752 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2753 
2754 /* lwbrx */
2755 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2756 
2757 #if defined(TARGET_PPC64)
2758 /* ldbrx */
2759 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
2760 /* stdbrx */
2761 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
2762 #endif  /* TARGET_PPC64 */
2763 
2764 /* sthbrx */
2765 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2766 /* stwbrx */
2767 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2768 
2769 /***                    Integer load and store multiple                    ***/
2770 
2771 /* lmw */
2772 static void gen_lmw(DisasContext *ctx)
2773 {
2774     TCGv t0;
2775     TCGv_i32 t1;
2776 
2777     if (ctx->le_mode) {
2778         gen_align_no_le(ctx);
2779         return;
2780     }
2781     gen_set_access_type(ctx, ACCESS_INT);
2782     t0 = tcg_temp_new();
2783     t1 = tcg_constant_i32(rD(ctx->opcode));
2784     gen_addr_imm_index(ctx, t0, 0);
2785     gen_helper_lmw(tcg_env, t0, t1);
2786 }
2787 
2788 /* stmw */
2789 static void gen_stmw(DisasContext *ctx)
2790 {
2791     TCGv t0;
2792     TCGv_i32 t1;
2793 
2794     if (ctx->le_mode) {
2795         gen_align_no_le(ctx);
2796         return;
2797     }
2798     gen_set_access_type(ctx, ACCESS_INT);
2799     t0 = tcg_temp_new();
2800     t1 = tcg_constant_i32(rS(ctx->opcode));
2801     gen_addr_imm_index(ctx, t0, 0);
2802     gen_helper_stmw(tcg_env, t0, t1);
2803 }
2804 
2805 /***                    Integer load and store strings                     ***/
2806 
2807 /* lswi */
2808 /*
2809  * PowerPC32 specification says we must generate an exception if rA is
2810  * in the range of registers to be loaded.  On the other hand, IBM says
2811  * this is valid, but rA won't be loaded.  For now, I'll follow the
2812  * spec...
2813  */
2814 static void gen_lswi(DisasContext *ctx)
2815 {
2816     TCGv t0;
2817     TCGv_i32 t1, t2;
2818     int nb = NB(ctx->opcode);
2819     int start = rD(ctx->opcode);
2820     int ra = rA(ctx->opcode);
2821     int nr;
2822 
2823     if (ctx->le_mode) {
2824         gen_align_no_le(ctx);
2825         return;
2826     }
2827     if (nb == 0) {
2828         nb = 32;
2829     }
2830     nr = DIV_ROUND_UP(nb, 4);
2831     if (unlikely(lsw_reg_in_range(start, nr, ra))) {
2832         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2833         return;
2834     }
2835     gen_set_access_type(ctx, ACCESS_INT);
2836     t0 = tcg_temp_new();
2837     gen_addr_register(ctx, t0);
2838     t1 = tcg_constant_i32(nb);
2839     t2 = tcg_constant_i32(start);
2840     gen_helper_lsw(tcg_env, t0, t1, t2);
2841 }
2842 
2843 /* lswx */
2844 static void gen_lswx(DisasContext *ctx)
2845 {
2846     TCGv t0;
2847     TCGv_i32 t1, t2, t3;
2848 
2849     if (ctx->le_mode) {
2850         gen_align_no_le(ctx);
2851         return;
2852     }
2853     gen_set_access_type(ctx, ACCESS_INT);
2854     t0 = tcg_temp_new();
2855     gen_addr_reg_index(ctx, t0);
2856     t1 = tcg_constant_i32(rD(ctx->opcode));
2857     t2 = tcg_constant_i32(rA(ctx->opcode));
2858     t3 = tcg_constant_i32(rB(ctx->opcode));
2859     gen_helper_lswx(tcg_env, t0, t1, t2, t3);
2860 }
2861 
2862 /* stswi */
2863 static void gen_stswi(DisasContext *ctx)
2864 {
2865     TCGv t0;
2866     TCGv_i32 t1, t2;
2867     int nb = NB(ctx->opcode);
2868 
2869     if (ctx->le_mode) {
2870         gen_align_no_le(ctx);
2871         return;
2872     }
2873     gen_set_access_type(ctx, ACCESS_INT);
2874     t0 = tcg_temp_new();
2875     gen_addr_register(ctx, t0);
2876     if (nb == 0) {
2877         nb = 32;
2878     }
2879     t1 = tcg_constant_i32(nb);
2880     t2 = tcg_constant_i32(rS(ctx->opcode));
2881     gen_helper_stsw(tcg_env, t0, t1, t2);
2882 }
2883 
2884 /* stswx */
2885 static void gen_stswx(DisasContext *ctx)
2886 {
2887     TCGv t0;
2888     TCGv_i32 t1, t2;
2889 
2890     if (ctx->le_mode) {
2891         gen_align_no_le(ctx);
2892         return;
2893     }
2894     gen_set_access_type(ctx, ACCESS_INT);
2895     t0 = tcg_temp_new();
2896     gen_addr_reg_index(ctx, t0);
2897     t1 = tcg_temp_new_i32();
2898     tcg_gen_trunc_tl_i32(t1, cpu_xer);
2899     tcg_gen_andi_i32(t1, t1, 0x7F);
2900     t2 = tcg_constant_i32(rS(ctx->opcode));
2901     gen_helper_stsw(tcg_env, t0, t1, t2);
2902 }
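
/*
 * For lswx/stswx the transfer length is not an immediate: it comes from
 * the low 7 bits of XER (hence the "& 0x7F" above), and a count of zero
 * transfers nothing.
 */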
2903 
2904 #if !defined(CONFIG_USER_ONLY)
2905 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
2906 {
2907     TCGv_i32 t;
2908     TCGLabel *l;
2909 
2910     if (!ctx->lazy_tlb_flush) {
2911         return;
2912     }
2913     l = gen_new_label();
2914     t = tcg_temp_new_i32();
2915     tcg_gen_ld_i32(t, tcg_env, offsetof(CPUPPCState, tlb_need_flush));
2916     tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
2917     if (global) {
2918         gen_helper_check_tlb_flush_global(tcg_env);
2919     } else {
2920         gen_helper_check_tlb_flush_local(tcg_env);
2921     }
2922     gen_set_label(l);
2923     if (global) {
2924         /*
2925          * Global TLB flush uses async-work which must run before the
2926          * next instruction, so this must be the last in the TB.
2927          */
2928         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2929     }
2930 }
2931 #else
2932 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
2933 #endif
2934 
2935 /* isync */
2936 static void gen_isync(DisasContext *ctx)
2937 {
2938     /*
2939      * We need to check for a pending TLB flush. This can only happen in
2940      * kernel mode, however, so check MSR_PR.
2941      */
2942     if (!ctx->pr) {
2943         gen_check_tlb_flush(ctx, false);
2944     }
2945     tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2946     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2947 }
2948 
2949 static void gen_load_locked(DisasContext *ctx, MemOp memop)
2950 {
2951     TCGv gpr = cpu_gpr[rD(ctx->opcode)];
2952     TCGv t0 = tcg_temp_new();
2953 
2954     gen_set_access_type(ctx, ACCESS_RES);
2955     gen_addr_reg_index(ctx, t0);
2956     tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, DEF_MEMOP(memop) | MO_ALIGN);
2957     tcg_gen_mov_tl(cpu_reserve, t0);
2958     tcg_gen_movi_tl(cpu_reserve_length, memop_size(memop));
2959     tcg_gen_mov_tl(cpu_reserve_val, gpr);
2960 }
2961 
2962 #define LARX(name, memop)                  \
2963 static void gen_##name(DisasContext *ctx)  \
2964 {                                          \
2965     gen_load_locked(ctx, memop);           \
2966 }
2967 
2968 /* lbarx, lharx & lwarx */
2969 LARX(lbarx, MO_UB)
2970 LARX(lharx, MO_UW)
2971 LARX(lwarx, MO_UL)
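
/*
 * Reservation model used by the larx/stcx. pairs: gen_load_locked()
 * records the reservation address, its length and the loaded value, and
 * the matching gen_conditional_store() below only succeeds if a cmpxchg
 * at that address still observes the recorded value.  A typical guest
 * sequence such as
 *
 *     1: lwarx  r0,0,r3
 *        addi   r0,r0,1
 *        stwcx. r0,0,r3
 *        bne-   1b
 *
 * therefore loops until the word is updated without interference.
 */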
2972 
2973 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
2974                                       TCGv EA, TCGCond cond, int addend)
2975 {
2976     TCGv t = tcg_temp_new();
2977     TCGv t2 = tcg_temp_new();
2978     TCGv u = tcg_temp_new();
2979 
2980     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
2981     tcg_gen_addi_tl(t2, EA, memop_size(memop));
2982     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
2983     tcg_gen_addi_tl(u, t, addend);
2984 
2985     /* E.g. for fetch and increment bounded... */
2986     /* mem(EA, s) = (t != t2 ? t + 1 : t) */
2987     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
2988     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
2989 
2990     /* RT = (t != t2 ? t : 1 << (s * 8 - 1)) */
2991     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t,
2992                        tcg_constant_tl(1 << (memop_size(memop) * 8 - 1)));
2993 }
2994 
2995 static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
2996 {
2997     uint32_t gpr_FC = FC(ctx->opcode);
2998     TCGv EA = tcg_temp_new();
2999     int rt = rD(ctx->opcode);
3000     bool need_serial;
3001     TCGv src, dst;
3002 
3003     gen_addr_register(ctx, EA);
3004     dst = cpu_gpr[rt];
3005     src = cpu_gpr[(rt + 1) & 31];
3006 
3007     need_serial = false;
3008     memop |= MO_ALIGN;
3009     switch (gpr_FC) {
3010     case 0: /* Fetch and add */
3011         tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
3012         break;
3013     case 1: /* Fetch and xor */
3014         tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
3015         break;
3016     case 2: /* Fetch and or */
3017         tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3018         break;
3019     case 3: /* Fetch and 'and' */
3020         tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3021         break;
3022     case 4:  /* Fetch and max unsigned */
3023         tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3024         break;
3025     case 5:  /* Fetch and max signed */
3026         tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3027         break;
3028     case 6:  /* Fetch and min unsigned */
3029         tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3030         break;
3031     case 7:  /* Fetch and min signed */
3032         tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3033         break;
3034     case 8: /* Swap */
3035         tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3036         break;
3037 
3038     case 16: /* Compare and swap not equal */
3039         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3040             need_serial = true;
3041         } else {
3042             TCGv t0 = tcg_temp_new();
3043             TCGv t1 = tcg_temp_new();
3044 
3045             tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3046             if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3047                 tcg_gen_mov_tl(t1, src);
3048             } else {
3049                 tcg_gen_ext32u_tl(t1, src);
3050             }
3051             tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3052                                cpu_gpr[(rt + 2) & 31], t0);
3053             tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3054             tcg_gen_mov_tl(dst, t0);
3055         }
3056         break;
3057 
3058     case 24: /* Fetch and increment bounded */
3059         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3060             need_serial = true;
3061         } else {
3062             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3063         }
3064         break;
3065     case 25: /* Fetch and increment equal */
3066         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3067             need_serial = true;
3068         } else {
3069             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3070         }
3071         break;
3072     case 28: /* Fetch and decrement bounded */
3073         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3074             need_serial = true;
3075         } else {
3076             gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3077         }
3078         break;
3079 
3080     default:
3081         /* invoke data storage error handler */
3082         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3083     }
3084 
3085     if (need_serial) {
3086         /* Restart with exclusive lock.  */
3087         gen_helper_exit_atomic(tcg_env);
3088         ctx->base.is_jmp = DISAS_NORETURN;
3089     }
3090 }
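
/*
 * These implement the lwat/ldat "atomic memory operation" loads; the FC
 * field of the instruction selects the function.  Most map directly onto
 * a single TCG atomic op, e.g. FC = 0 becomes
 *
 *     tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
 *
 * while the compare-and-swap-not-equal and bounded increment/decrement
 * forms fall back to a serialised restart (gen_helper_exit_atomic) when
 * translating for parallel vCPUs.
 */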
3091 
3092 static void gen_lwat(DisasContext *ctx)
3093 {
3094     gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3095 }
3096 
3097 #ifdef TARGET_PPC64
3098 static void gen_ldat(DisasContext *ctx)
3099 {
3100     gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3101 }
3102 #endif
3103 
3104 static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3105 {
3106     uint32_t gpr_FC = FC(ctx->opcode);
3107     TCGv EA = tcg_temp_new();
3108     TCGv src, discard;
3109 
3110     gen_addr_register(ctx, EA);
3111     src = cpu_gpr[rD(ctx->opcode)];
3112     discard = tcg_temp_new();
3113 
3114     memop |= MO_ALIGN;
3115     switch (gpr_FC) {
3116     case 0: /* add and Store */
3117         tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3118         break;
3119     case 1: /* xor and Store */
3120         tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3121         break;
3122     case 2: /* Or and Store */
3123         tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3124         break;
3125     case 3: /* 'and' and Store */
3126         tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3127         break;
3128     case 4:  /* Store max unsigned */
3129         tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3130         break;
3131     case 5:  /* Store max signed */
3132         tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3133         break;
3134     case 6:  /* Store min unsigned */
3135         tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3136         break;
3137     case 7:  /* Store min signed */
3138         tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3139         break;
3140     case 24: /* Store twin  */
3141         if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3142             /* Restart with exclusive lock.  */
3143             gen_helper_exit_atomic(tcg_env);
3144             ctx->base.is_jmp = DISAS_NORETURN;
3145         } else {
3146             TCGv t = tcg_temp_new();
3147             TCGv t2 = tcg_temp_new();
3148             TCGv s = tcg_temp_new();
3149             TCGv s2 = tcg_temp_new();
3150             TCGv ea_plus_s = tcg_temp_new();
3151 
3152             tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3153             tcg_gen_addi_tl(ea_plus_s, EA, memop_size(memop));
3154             tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3155             tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3156             tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3157             tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3158             tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3159         }
3160         break;
3161     default:
3162         /* invoke data storage error handler */
3163         gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3164     }
3165 }
3166 
3167 static void gen_stwat(DisasContext *ctx)
3168 {
3169     gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3170 }
3171 
3172 #ifdef TARGET_PPC64
3173 static void gen_stdat(DisasContext *ctx)
3174 {
3175     gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3176 }
3177 #endif
3178 
3179 static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3180 {
3181     TCGLabel *lfail;
3182     TCGv EA;
3183     TCGv cr0;
3184     TCGv t0;
3185     int rs = rS(ctx->opcode);
3186 
3187     lfail = gen_new_label();
3188     EA = tcg_temp_new();
3189     cr0 = tcg_temp_new();
3190     t0 = tcg_temp_new();
3191 
3192     tcg_gen_mov_tl(cr0, cpu_so);
3193     gen_set_access_type(ctx, ACCESS_RES);
3194     gen_addr_reg_index(ctx, EA);
3195     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3196     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, memop_size(memop), lfail);
3197 
3198     tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3199                               cpu_gpr[rs], ctx->mem_idx,
3200                               DEF_MEMOP(memop) | MO_ALIGN);
3201     tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3202     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3203     tcg_gen_or_tl(cr0, cr0, t0);
3204 
3205     gen_set_label(lfail);
3206     tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3207     tcg_gen_movi_tl(cpu_reserve, -1);
3208 }
3209 
3210 #define STCX(name, memop)                  \
3211 static void gen_##name(DisasContext *ctx)  \
3212 {                                          \
3213     gen_conditional_store(ctx, memop);     \
3214 }
3215 
3216 STCX(stbcx_, MO_UB)
3217 STCX(sthcx_, MO_UW)
3218 STCX(stwcx_, MO_UL)
3219 
3220 #if defined(TARGET_PPC64)
3221 /* ldarx */
3222 LARX(ldarx, MO_UQ)
3223 /* stdcx. */
3224 STCX(stdcx_, MO_UQ)
3225 
3226 /* lqarx */
3227 static void gen_lqarx(DisasContext *ctx)
3228 {
3229     int rd = rD(ctx->opcode);
3230     TCGv EA, hi, lo;
3231     TCGv_i128 t16;
3232 
3233     if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3234                  (rd == rB(ctx->opcode)))) {
3235         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3236         return;
3237     }
3238 
3239     gen_set_access_type(ctx, ACCESS_RES);
3240     EA = tcg_temp_new();
3241     gen_addr_reg_index(ctx, EA);
3242 
3243     /* Note that the low part is always in RD+1, even in LE mode.  */
3244     lo = cpu_gpr[rd + 1];
3245     hi = cpu_gpr[rd];
3246 
3247     t16 = tcg_temp_new_i128();
3248     tcg_gen_qemu_ld_i128(t16, EA, ctx->mem_idx, DEF_MEMOP(MO_128 | MO_ALIGN));
3249     tcg_gen_extr_i128_i64(lo, hi, t16);
3250 
3251     tcg_gen_mov_tl(cpu_reserve, EA);
3252     tcg_gen_movi_tl(cpu_reserve_length, 16);
3253     tcg_gen_st_tl(hi, tcg_env, offsetof(CPUPPCState, reserve_val));
3254     tcg_gen_st_tl(lo, tcg_env, offsetof(CPUPPCState, reserve_val2));
3255 }
3256 
3257 /* stqcx. */
3258 static void gen_stqcx_(DisasContext *ctx)
3259 {
3260     TCGLabel *lfail;
3261     TCGv EA, t0, t1;
3262     TCGv cr0;
3263     TCGv_i128 cmp, val;
3264     int rs = rS(ctx->opcode);
3265 
3266     if (unlikely(rs & 1)) {
3267         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3268         return;
3269     }
3270 
3271     lfail = gen_new_label();
3272     EA = tcg_temp_new();
3273     cr0 = tcg_temp_new();
3274 
3275     tcg_gen_mov_tl(cr0, cpu_so);
3276     gen_set_access_type(ctx, ACCESS_RES);
3277     gen_addr_reg_index(ctx, EA);
3278     tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3279     tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, 16, lfail);
3280 
3281     cmp = tcg_temp_new_i128();
3282     val = tcg_temp_new_i128();
3283 
3284     tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
3285 
3286     /* Note that the low part is always in RS+1, even in LE mode.  */
3287     tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
3288 
3289     tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
3290                                 DEF_MEMOP(MO_128 | MO_ALIGN));
3291 
3292     t0 = tcg_temp_new();
3293     t1 = tcg_temp_new();
3294     tcg_gen_extr_i128_i64(t1, t0, val);
3295 
3296     tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
3297     tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
3298     tcg_gen_or_tl(t0, t0, t1);
3299 
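    /*
     * t0 == 0 iff the value returned by the cmpxchg equals the reservation
     * value, i.e. the 128-bit store conditional succeeded.
     */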
3300     tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
3301     tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3302     tcg_gen_or_tl(cr0, cr0, t0);
3303 
3304     gen_set_label(lfail);
3305     tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3306     tcg_gen_movi_tl(cpu_reserve, -1);
3307 }
3308 #endif /* defined(TARGET_PPC64) */
3309 
3310 /* wait */
3311 static void gen_wait(DisasContext *ctx)
3312 {
3313     uint32_t wc;
3314 
3315     if (ctx->insns_flags & PPC_WAIT) {
3316         /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
3317 
3318         if (ctx->insns_flags2 & PPC2_PM_ISA206) {
3319             /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
3320             wc = WC(ctx->opcode);
3321         } else {
3322             wc = 0;
3323         }
3324 
3325     } else if (ctx->insns_flags2 & PPC2_ISA300) {
3326         /* v3.0 defines a new 'wait' encoding. */
3327         wc = WC(ctx->opcode);
3328         if (ctx->insns_flags2 & PPC2_ISA310) {
3329             uint32_t pl = PL(ctx->opcode);
3330 
3331             /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
3332             if (wc == 3) {
3333                 gen_invalid(ctx);
3334                 return;
3335             }
3336 
3337             /* PL 1-3 are reserved. If WC=2 then the insn is treated as a no-op. */
3338             if (pl > 0 && wc != 2) {
3339                 gen_invalid(ctx);
3340                 return;
3341             }
3342 
3343         } else { /* ISA300 */
3344             /* WC 1-3 are reserved */
3345             if (wc > 0) {
3346                 gen_invalid(ctx);
3347                 return;
3348             }
3349         }
3350 
3351     } else {
3352         warn_report("wait instruction decoded with wrong ISA flags.");
3353         gen_invalid(ctx);
3354         return;
3355     }
3356 
3357     /*
3358      * wait without WC field or with WC=0 waits for an exception / interrupt
3359      * to occur.
3360      */
3361     if (wc == 0) {
3362         TCGv_i32 t0 = tcg_constant_i32(1);
3363         tcg_gen_st_i32(t0, tcg_env,
3364                        -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3365         /* Stop translation, as the CPU is supposed to sleep from now */
3366         gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3367     }
3368 
3369     /*
3370      * Other wait types must not just wait until an exception occurs because
3371      * ignoring their other wake-up conditions could cause a hang.
3372      *
3373      * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
3374      * no-ops.
3375      *
3376      * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
3377      *
3378      * wc=2 waits for an implementation-specific condition, which could be
3379      * always true, so it can be implemented as a no-op.
3380      *
3381      * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
3382      *
3383      * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
3384      * Reservation-loss may have implementation-specific conditions, so it
3385      * can be implemented as a no-op.
3386      *
3387      * wc=2 waits for an exception or an amount of time to pass. This
3388      * amount is implementation-specific so it can be implemented as a
3389      * no-op.
3390      *
3391      * ISA v3.1 allows for execution to resume "in the rare case of
3392      * an implementation-dependent event", so in any case software must
3393      * not depend on the architected resumption condition to become
3394      * true, so no-op implementations should be architecturally correct
3395      * (if suboptimal).
3396      */
3397 }
3398 
3399 #if defined(TARGET_PPC64)
3400 static void gen_doze(DisasContext *ctx)
3401 {
3402 #if defined(CONFIG_USER_ONLY)
3403     GEN_PRIV(ctx);
3404 #else
3405     TCGv_i32 t;
3406 
3407     CHK_HV(ctx);
3408     translator_io_start(&ctx->base);
3409     t = tcg_constant_i32(PPC_PM_DOZE);
3410     gen_helper_pminsn(tcg_env, t);
3411     /* Stop translation, as the CPU is supposed to sleep from now */
3412     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3413 #endif /* defined(CONFIG_USER_ONLY) */
3414 }
3415 
3416 static void gen_nap(DisasContext *ctx)
3417 {
3418 #if defined(CONFIG_USER_ONLY)
3419     GEN_PRIV(ctx);
3420 #else
3421     TCGv_i32 t;
3422 
3423     CHK_HV(ctx);
3424     translator_io_start(&ctx->base);
3425     t = tcg_constant_i32(PPC_PM_NAP);
3426     gen_helper_pminsn(tcg_env, t);
3427     /* Stop translation, as the CPU is supposed to sleep from now */
3428     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3429 #endif /* defined(CONFIG_USER_ONLY) */
3430 }
3431 
3432 static void gen_stop(DisasContext *ctx)
3433 {
3434 #if defined(CONFIG_USER_ONLY)
3435     GEN_PRIV(ctx);
3436 #else
3437     TCGv_i32 t;
3438 
3439     CHK_HV(ctx);
3440     translator_io_start(&ctx->base);
3441     t = tcg_constant_i32(PPC_PM_STOP);
3442     gen_helper_pminsn(tcg_env, t);
3443     /* Stop translation, as the CPU is supposed to sleep from now */
3444     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3445 #endif /* defined(CONFIG_USER_ONLY) */
3446 }
3447 
3448 static void gen_sleep(DisasContext *ctx)
3449 {
3450 #if defined(CONFIG_USER_ONLY)
3451     GEN_PRIV(ctx);
3452 #else
3453     TCGv_i32 t;
3454 
3455     CHK_HV(ctx);
3456     translator_io_start(&ctx->base);
3457     t = tcg_constant_i32(PPC_PM_SLEEP);
3458     gen_helper_pminsn(tcg_env, t);
3459     /* Stop translation, as the CPU is supposed to sleep from now */
3460     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3461 #endif /* defined(CONFIG_USER_ONLY) */
3462 }
3463 
3464 static void gen_rvwinkle(DisasContext *ctx)
3465 {
3466 #if defined(CONFIG_USER_ONLY)
3467     GEN_PRIV(ctx);
3468 #else
3469     TCGv_i32 t;
3470 
3471     CHK_HV(ctx);
3472     translator_io_start(&ctx->base);
3473     t = tcg_constant_i32(PPC_PM_RVWINKLE);
3474     gen_helper_pminsn(tcg_env, t);
3475     /* Stop translation, as the CPU is supposed to sleep from now */
3476     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3477 #endif /* defined(CONFIG_USER_ONLY) */
3478 }
3479 
3480 static inline TCGv gen_write_bhrb(TCGv_ptr base, TCGv offset, TCGv mask, TCGv value)
3481 {
3482     TCGv_ptr tmp = tcg_temp_new_ptr();
3483 
3484     /* add base and offset to get address of bhrb entry */
3485     tcg_gen_add_ptr(tmp, base, (TCGv_ptr)offset);
3486 
3487     /* store value into bhrb at bhrb_offset */
3488     tcg_gen_st_i64(value, tmp, 0);
3489 
3490     /* add 8 to current bhrb_offset */
3491     tcg_gen_addi_tl(offset, offset, 8);
3492 
3493     /* apply offset mask */
3494     tcg_gen_and_tl(offset, offset, mask);
3495 
3496     return offset;
3497 }
3498 #endif /* #if defined(TARGET_PPC64) */
3499 
3500 static inline void gen_update_branch_history(DisasContext *ctx,
3501                                              target_ulong nip,
3502                                              TCGv target,
3503                                              target_long inst_type)
3504 {
3505 #if defined(TARGET_PPC64)
3506     TCGv_ptr base;
3507     TCGv tmp;
3508     TCGv offset;
3509     TCGv mask;
3510     TCGLabel *no_update;
3511 
3512     if (ctx->has_cfar) {
3513         tcg_gen_movi_tl(cpu_cfar, nip);
3514     }
3515 
3516     if (!ctx->has_bhrb ||
3517         !ctx->bhrb_enable ||
3518         inst_type == BHRB_TYPE_NORECORD) {
3519         return;
3520     }
3521 
3522     tmp = tcg_temp_new();
3523     no_update = gen_new_label();
3524 
3525     /* check for bhrb filtering */
3526     tcg_gen_ld_tl(tmp, tcg_env, offsetof(CPUPPCState, bhrb_filter));
3527     tcg_gen_andi_tl(tmp, tmp, inst_type);
3528     tcg_gen_brcondi_tl(TCG_COND_EQ, tmp, 0, no_update);
3529 
3530     base = tcg_temp_new_ptr();
3531     offset = tcg_temp_new();
3532     mask = tcg_temp_new();
3533 
3534     /* load bhrb base address */
3535     tcg_gen_ld_ptr(base, tcg_env, offsetof(CPUPPCState, bhrb_base));
3536 
3537     /* load current bhrb_offset */
3538     tcg_gen_ld_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3539 
3540     /* load a BHRB offset mask */
3541     tcg_gen_ld_tl(mask, tcg_env, offsetof(CPUPPCState, bhrb_offset_mask));
3542 
3543     offset = gen_write_bhrb(base, offset, mask, tcg_constant_i64(nip));
3544 
3545     /* Also record the target address for XL-Form branches */
3546     if (inst_type & BHRB_TYPE_XL_FORM) {
3547 
3548         /* Set the 'T' bit for target entries */
3549         tcg_gen_ori_tl(tmp, target, 0x2);
3550 
3551         offset = gen_write_bhrb(base, offset, mask, tmp);
3552     }
3553 
3554     /* save updated bhrb_offset for next time */
3555     tcg_gen_st_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3556 
3557     gen_set_label(no_update);
3558 #endif
3559 }
3560 
3561 #if defined(TARGET_PPC64)
3562 static void pmu_count_insns(DisasContext *ctx)
3563 {
3564     /*
3565      * Do not bother calling the helper if the PMU isn't counting
3566      * instructions.
3567      */
3568     if (!ctx->pmu_insn_cnt) {
3569         return;
3570     }
3571 
3572  #if !defined(CONFIG_USER_ONLY)
3573     TCGLabel *l;
3574     TCGv t0;
3575 
3576     /*
3577      * The PMU insns_inc() helper stops the internal PMU timer if a
3578      * counter overflows happens. In that case, if the guest is
3579      * running with icount and we do not handle it beforehand,
3580      * the helper can trigger a 'bad icount read'.
3581      */
3582     translator_io_start(&ctx->base);
3583 
3584     /* Avoid helper calls when only PMC5-6 are enabled. */
3585     if (!ctx->pmc_other) {
3586         l = gen_new_label();
3587         t0 = tcg_temp_new();
3588 
3589         gen_load_spr(t0, SPR_POWER_PMC5);
3590         tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3591         gen_store_spr(SPR_POWER_PMC5, t0);
3592         /* Check for overflow, if it's enabled */
3593         if (ctx->mmcr0_pmcjce) {
3594             tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
3595             gen_helper_handle_pmc5_overflow(tcg_env);
3596         }
3597 
3598         gen_set_label(l);
3599     } else {
3600         gen_helper_insns_inc(tcg_env, tcg_constant_i32(ctx->base.num_insns));
3601     }
3602   #else
3603     /*
3604      * User mode can read (but not write) PMC5 and start/stop
3605      * the PMU via MMCR0_FC. In this case just increment
3606      * PMC5 with base.num_insns.
3607      */
3608     TCGv t0 = tcg_temp_new();
3609 
3610     gen_load_spr(t0, SPR_POWER_PMC5);
3611     tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3612     gen_store_spr(SPR_POWER_PMC5, t0);
3613   #endif /* #if !defined(CONFIG_USER_ONLY) */
3614 }
3615 #else
3616 static void pmu_count_insns(DisasContext *ctx)
3617 {
3618     return;
3619 }
3620 #endif /* #if defined(TARGET_PPC64) */
3621 
3622 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
3623 {
3624     if (unlikely(ctx->singlestep_enabled)) {
3625         return false;
3626     }
3627     return translator_use_goto_tb(&ctx->base, dest);
3628 }
3629 
3630 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
3631 {
3632     if (unlikely(ctx->singlestep_enabled)) {
3633         gen_debug_exception(ctx, false);
3634     } else {
3635         /*
3636          * tcg_gen_lookup_and_goto_ptr will exit the TB if
3637          * CF_NO_GOTO_PTR is set. Count insns now.
3638          */
3639         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
3640             pmu_count_insns(ctx);
3641         }
3642 
3643         tcg_gen_lookup_and_goto_ptr();
3644     }
3645 }
3646 
3647 /***                                Branch                                 ***/
3648 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3649 {
3650     if (NARROW_MODE(ctx)) {
3651         dest = (uint32_t) dest;
3652     }
3653     if (use_goto_tb(ctx, dest)) {
3654         pmu_count_insns(ctx);
3655         tcg_gen_goto_tb(n);
3656         tcg_gen_movi_tl(cpu_nip, dest & ~3);
3657         tcg_gen_exit_tb(ctx->base.tb, n);
3658     } else {
3659         tcg_gen_movi_tl(cpu_nip, dest & ~3);
3660         gen_lookup_and_goto_ptr(ctx);
3661     }
3662 }
3663 
3664 static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3665 {
3666     if (NARROW_MODE(ctx)) {
3667         nip = (uint32_t)nip;
3668     }
3669     tcg_gen_movi_tl(cpu_lr, nip);
3670 }
3671 
3672 /* b ba bl bla */
3673 static void gen_b(DisasContext *ctx)
3674 {
3675     target_ulong li, target;
3676 
3677     /* sign extend LI */
3678     li = LI(ctx->opcode);
3679     li = (li ^ 0x02000000) - 0x02000000;
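    /*
     * Example: LI = 0x03fffffc has the sign bit (0x02000000) set, so
     * (0x03fffffc ^ 0x02000000) - 0x02000000 = -4.
     */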
3680     if (likely(AA(ctx->opcode) == 0)) {
3681         target = ctx->cia + li;
3682     } else {
3683         target = li;
3684     }
3685     if (LK(ctx->opcode)) {
3686         gen_setlr(ctx, ctx->base.pc_next);
3687         gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_CALL);
3688     } else {
3689         gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_OTHER);
3690     }
3691     gen_goto_tb(ctx, 0, target);
3692     ctx->base.is_jmp = DISAS_NORETURN;
3693 }
3694 
3695 #define BCOND_IM  0
3696 #define BCOND_LR  1
3697 #define BCOND_CTR 2
3698 #define BCOND_TAR 3
3699 
3700 static void gen_bcond(DisasContext *ctx, int type)
3701 {
3702     uint32_t bo = BO(ctx->opcode);
3703     TCGLabel *l1;
3704     TCGv target;
3705     target_long bhrb_type = BHRB_TYPE_OTHER;
3706 
3707     if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3708         target = tcg_temp_new();
3709         if (type == BCOND_CTR) {
3710             tcg_gen_mov_tl(target, cpu_ctr);
3711         } else if (type == BCOND_TAR) {
3712             gen_load_spr(target, SPR_TAR);
3713         } else {
3714             tcg_gen_mov_tl(target, cpu_lr);
3715         }
3716         if (!LK(ctx->opcode)) {
3717             bhrb_type |= BHRB_TYPE_INDIRECT;
3718         }
3719         bhrb_type |= BHRB_TYPE_XL_FORM;
3720     } else {
3721         target = NULL;
3722     }
3723     if (LK(ctx->opcode)) {
3724         gen_setlr(ctx, ctx->base.pc_next);
3725         bhrb_type |= BHRB_TYPE_CALL;
3726     }
3727     l1 = gen_new_label();
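    /*
     * l1 is the not-taken path: the checks below branch there whenever a
     * BO condition fails.
     */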
3728     if ((bo & 0x4) == 0) {
3729         /* Decrement and test CTR */
3730         TCGv temp = tcg_temp_new();
3731 
3732         if (type == BCOND_CTR) {
3733             /*
3734              * All ISAs up to v3 describe this form of bcctr as invalid but
3735              * some processors, i.e. 64-bit server processors compliant with
3736              * arch 2.x, do implement a "test and decrement" logic instead,
3737              * as described in their respective UMs. This logic makes CTR
3738              * act as both the branch target and a counter, which makes it
3739              * basically useless and thus never used in real code.
3740              *
3741              * This form was hence chosen to trigger an extra micro-architectural
3742              * side-effect on real HW needed for the Spectre v2 workaround.
3743              * It is up to guests that implement such a workaround, i.e. Linux,
3744              * to use this form in a way that just triggers the side-effect
3745              * without doing anything else harmful.
3746              */
3747             if (unlikely(!is_book3s_arch2x(ctx))) {
3748                 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3749                 return;
3750             }
3751 
3752             if (NARROW_MODE(ctx)) {
3753                 tcg_gen_ext32u_tl(temp, cpu_ctr);
3754             } else {
3755                 tcg_gen_mov_tl(temp, cpu_ctr);
3756             }
3757             if (bo & 0x2) {
3758                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3759             } else {
3760                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3761             }
3762             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3763         } else {
3764             tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3765             if (NARROW_MODE(ctx)) {
3766                 tcg_gen_ext32u_tl(temp, cpu_ctr);
3767             } else {
3768                 tcg_gen_mov_tl(temp, cpu_ctr);
3769             }
3770             if (bo & 0x2) {
3771                 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3772             } else {
3773                 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3774             }
3775         }
3776         bhrb_type |= BHRB_TYPE_COND;
3777     }
3778     if ((bo & 0x10) == 0) {
3779         /* Test CR */
3780         uint32_t bi = BI(ctx->opcode);
3781         uint32_t mask = 0x08 >> (bi & 0x03);
3782         TCGv_i32 temp = tcg_temp_new_i32();
3783 
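        /* BO[1] (0x8) selects branch-if-set vs. branch-if-clear of the CR bit. */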
3784         if (bo & 0x8) {
3785             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3786             tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3787         } else {
3788             tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3789             tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3790         }
3791         bhrb_type |= BHRB_TYPE_COND;
3792     }
3793 
3794     gen_update_branch_history(ctx, ctx->cia, target, bhrb_type);
3795 
3796     if (type == BCOND_IM) {
3797         target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3798         if (likely(AA(ctx->opcode) == 0)) {
3799             gen_goto_tb(ctx, 0, ctx->cia + li);
3800         } else {
3801             gen_goto_tb(ctx, 0, li);
3802         }
3803     } else {
3804         if (NARROW_MODE(ctx)) {
3805             tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3806         } else {
3807             tcg_gen_andi_tl(cpu_nip, target, ~3);
3808         }
3809         gen_lookup_and_goto_ptr(ctx);
3810     }
3811     if ((bo & 0x14) != 0x14) {
3812         /* fallthrough case */
3813         gen_set_label(l1);
3814         gen_goto_tb(ctx, 1, ctx->base.pc_next);
3815     }
3816     ctx->base.is_jmp = DISAS_NORETURN;
3817 }
3818 
3819 static void gen_bc(DisasContext *ctx)
3820 {
3821     gen_bcond(ctx, BCOND_IM);
3822 }
3823 
3824 static void gen_bcctr(DisasContext *ctx)
3825 {
3826     gen_bcond(ctx, BCOND_CTR);
3827 }
3828 
3829 static void gen_bclr(DisasContext *ctx)
3830 {
3831     gen_bcond(ctx, BCOND_LR);
3832 }
3833 
3834 static void gen_bctar(DisasContext *ctx)
3835 {
3836     gen_bcond(ctx, BCOND_TAR);
3837 }
3838 
3839 /***                      Condition register logical                       ***/
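/*
 * Each CR field is kept in its own 32-bit global with bits 3..0 holding
 * LT, GT, EQ and SO.  The shifts below align the source bits crbA/crbB
 * with the destination bit crbD before applying the logical op; the
 * result is then masked and merged back into the destination CR field.
 */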
3840 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
3841 static void glue(gen_, name)(DisasContext *ctx)                               \
3842 {                                                                             \
3843     uint8_t bitmask;                                                          \
3844     int sh;                                                                   \
3845     TCGv_i32 t0, t1;                                                          \
3846     sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
3847     t0 = tcg_temp_new_i32();                                                  \
3848     if (sh > 0)                                                               \
3849         tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
3850     else if (sh < 0)                                                          \
3851         tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
3852     else                                                                      \
3853         tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
3854     t1 = tcg_temp_new_i32();                                                  \
3855     sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
3856     if (sh > 0)                                                               \
3857         tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
3858     else if (sh < 0)                                                          \
3859         tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
3860     else                                                                      \
3861         tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
3862     tcg_op(t0, t0, t1);                                                       \
3863     bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
3864     tcg_gen_andi_i32(t0, t0, bitmask);                                        \
3865     tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
3866     tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
3867 }
3868 
3869 /* crand */
3870 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3871 /* crandc */
3872 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3873 /* creqv */
3874 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3875 /* crnand */
3876 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3877 /* crnor */
3878 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3879 /* cror */
3880 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3881 /* crorc */
3882 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3883 /* crxor */
3884 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3885 
3886 /* mcrf */
3887 static void gen_mcrf(DisasContext *ctx)
3888 {
3889     tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3890 }
3891 
3892 /***                           System linkage                              ***/
3893 
3894 /* rfi (supervisor only) */
3895 static void gen_rfi(DisasContext *ctx)
3896 {
3897 #if defined(CONFIG_USER_ONLY)
3898     GEN_PRIV(ctx);
3899 #else
3900     /*
3901      * This instruction doesn't exist anymore on 64-bit server
3902      * processors compliant with arch 2.x
3903      */
3904     if (is_book3s_arch2x(ctx)) {
3905         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3906         return;
3907     }
3908     /* Restore CPU state */
3909     CHK_SV(ctx);
3910     translator_io_start(&ctx->base);
3911     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3912     gen_helper_rfi(tcg_env);
3913     ctx->base.is_jmp = DISAS_EXIT;
3914 #endif
3915 }
3916 
3917 #if defined(TARGET_PPC64)
3918 static void gen_rfid(DisasContext *ctx)
3919 {
3920 #if defined(CONFIG_USER_ONLY)
3921     GEN_PRIV(ctx);
3922 #else
3923     /* Restore CPU state */
3924     CHK_SV(ctx);
3925     translator_io_start(&ctx->base);
3926     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3927     gen_helper_rfid(tcg_env);
3928     ctx->base.is_jmp = DISAS_EXIT;
3929 #endif
3930 }
3931 
3932 #if !defined(CONFIG_USER_ONLY)
3933 static void gen_rfscv(DisasContext *ctx)
3934 {
3935 #if defined(CONFIG_USER_ONLY)
3936     GEN_PRIV(ctx);
3937 #else
3938     /* Restore CPU state */
3939     CHK_SV(ctx);
3940     translator_io_start(&ctx->base);
3941     gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3942     gen_helper_rfscv(tcg_env);
3943     ctx->base.is_jmp = DISAS_EXIT;
3944 #endif
3945 }
3946 #endif
3947 
3948 static void gen_hrfid(DisasContext *ctx)
3949 {
3950 #if defined(CONFIG_USER_ONLY)
3951     GEN_PRIV(ctx);
3952 #else
3953     /* Restore CPU state */
3954     CHK_HV(ctx);
3955     translator_io_start(&ctx->base);
3956     gen_helper_hrfid(tcg_env);
3957     ctx->base.is_jmp = DISAS_EXIT;
3958 #endif
3959 }
3960 #endif
3961 
3962 /* sc */
3963 #if defined(CONFIG_USER_ONLY)
3964 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3965 #else
3966 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3967 #endif
3968 static void gen_sc(DisasContext *ctx)
3969 {
3970     uint32_t lev;
3971 
3972     /*
3973      * LEV is a 7-bit field, but the top 6 bits are treated as a reserved
3974      * field (i.e., ignored). ISA v3.1 changes that to 5 bits, but that is
3975      * for Ultravisor which TCG does not support, so just ignore the top 6.
3976      */
3977     lev = (ctx->opcode >> 5) & 0x1;
3978     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3979 }
3980 
3981 #if defined(TARGET_PPC64)
3982 #if !defined(CONFIG_USER_ONLY)
3983 static void gen_scv(DisasContext *ctx)
3984 {
3985     uint32_t lev = (ctx->opcode >> 5) & 0x7F;
3986 
3987     /* Set the PC back to the faulting instruction. */
3988     gen_update_nip(ctx, ctx->cia);
3989     gen_helper_scv(tcg_env, tcg_constant_i32(lev));
3990 
3991     ctx->base.is_jmp = DISAS_NORETURN;
3992 }
3993 #endif
3994 #endif
3995 
3996 /***                                Trap                                   ***/
3997 
3998 /* Check for unconditional traps (always or never) */
3999 static bool check_unconditional_trap(DisasContext *ctx, int to)
4000 {
4001     /* Trap never */
4002     if (to == 0) {
4003         return true;
4004     }
4005     /* Trap always */
4006     if (to == 31) {
4007         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4008         return true;
4009     }
4010     return false;
4011 }
4012 
4013 /***                          Processor control                            ***/
4014 
4015 /* mcrxr */
4016 static void gen_mcrxr(DisasContext *ctx)
4017 {
4018     TCGv_i32 t0 = tcg_temp_new_i32();
4019     TCGv_i32 t1 = tcg_temp_new_i32();
4020     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4021 
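    /* CR[crfD] <- [SO, OV, CA, 0]; SO, OV and CA are then cleared in XER. */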
4022     tcg_gen_trunc_tl_i32(t0, cpu_so);
4023     tcg_gen_trunc_tl_i32(t1, cpu_ov);
4024     tcg_gen_trunc_tl_i32(dst, cpu_ca);
4025     tcg_gen_shli_i32(t0, t0, 3);
4026     tcg_gen_shli_i32(t1, t1, 2);
4027     tcg_gen_shli_i32(dst, dst, 1);
4028     tcg_gen_or_i32(dst, dst, t0);
4029     tcg_gen_or_i32(dst, dst, t1);
4030 
4031     tcg_gen_movi_tl(cpu_so, 0);
4032     tcg_gen_movi_tl(cpu_ov, 0);
4033     tcg_gen_movi_tl(cpu_ca, 0);
4034 }
4035 
4036 #ifdef TARGET_PPC64
4037 /* mcrxrx */
4038 static void gen_mcrxrx(DisasContext *ctx)
4039 {
4040     TCGv t0 = tcg_temp_new();
4041     TCGv t1 = tcg_temp_new();
4042     TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4043 
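    /* CR[crfD] <- [OV, OV32, CA, CA32] */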
4044     /* copy OV and OV32 */
4045     tcg_gen_shli_tl(t0, cpu_ov, 1);
4046     tcg_gen_or_tl(t0, t0, cpu_ov32);
4047     tcg_gen_shli_tl(t0, t0, 2);
4048     /* copy CA and CA32 */
4049     tcg_gen_shli_tl(t1, cpu_ca, 1);
4050     tcg_gen_or_tl(t1, t1, cpu_ca32);
4051     tcg_gen_or_tl(t0, t0, t1);
4052     tcg_gen_trunc_tl_i32(dst, t0);
4053 }
4054 #endif
4055 
4056 /* mfcr mfocrf */
4057 static void gen_mfcr(DisasContext *ctx)
4058 {
4059     uint32_t crm, crn;
4060 
4061     if (likely(ctx->opcode & 0x00100000)) {
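        /* mfocrf: copy only the CR field selected by the single CRM bit. */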
4062         crm = CRM(ctx->opcode);
4063         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4064             crn = ctz32(crm);
4065             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4066             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4067                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4068         }
4069     } else {
4070         TCGv_i32 t0 = tcg_temp_new_i32();
4071         tcg_gen_mov_i32(t0, cpu_crf[0]);
4072         tcg_gen_shli_i32(t0, t0, 4);
4073         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4074         tcg_gen_shli_i32(t0, t0, 4);
4075         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4076         tcg_gen_shli_i32(t0, t0, 4);
4077         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4078         tcg_gen_shli_i32(t0, t0, 4);
4079         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4080         tcg_gen_shli_i32(t0, t0, 4);
4081         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4082         tcg_gen_shli_i32(t0, t0, 4);
4083         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4084         tcg_gen_shli_i32(t0, t0, 4);
4085         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4086         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4087     }
4088 }
4089 
4090 /* mfmsr */
4091 static void gen_mfmsr(DisasContext *ctx)
4092 {
4093     CHK_SV(ctx);
4094     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4095 }
4096 
4097 /* mfspr */
4098 static inline void gen_op_mfspr(DisasContext *ctx)
4099 {
4100     void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4101     uint32_t sprn = SPR(ctx->opcode);
4102 
4103 #if defined(CONFIG_USER_ONLY)
4104     read_cb = ctx->spr_cb[sprn].uea_read;
4105 #else
4106     if (ctx->pr) {
4107         read_cb = ctx->spr_cb[sprn].uea_read;
4108     } else if (ctx->hv) {
4109         read_cb = ctx->spr_cb[sprn].hea_read;
4110     } else {
4111         read_cb = ctx->spr_cb[sprn].oea_read;
4112     }
4113 #endif
4114     if (likely(read_cb != NULL)) {
4115         if (likely(read_cb != SPR_NOACCESS)) {
4116             (*read_cb)(ctx, rD(ctx->opcode), sprn);
4117         } else {
4118             /* Privilege exception */
4119             /*
4120              * This is a hack to avoid warnings when running Linux:
4121              * this OS breaks the PowerPC virtualisation model,
4122              * allowing userland application to read the PVR
4123              * allowing userland applications to read the PVR
4124             if (sprn != SPR_PVR) {
4125                 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4126                               "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4127                               ctx->cia);
4128             }
4129             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4130         }
4131     } else {
4132         /* ISA 2.07 defines these as no-ops */
4133         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4134             (sprn >= 808 && sprn <= 811)) {
4135             /* This is a nop */
4136             return;
4137         }
4138         /* Not defined */
4139         qemu_log_mask(LOG_GUEST_ERROR,
4140                       "Trying to read invalid spr %d (0x%03x) at "
4141                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4142 
4143         /*
4144          * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4145          * generate a privilege exception, an HV emulation assist or a no-op
4146          */
4147         if (sprn & 0x10) {
4148             if (ctx->pr) {
4149                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4150             }
4151         } else {
4152             if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4153                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4154             }
4155         }
4156     }
4157 }
4158 
4159 static void gen_mfspr(DisasContext *ctx)
4160 {
4161     gen_op_mfspr(ctx);
4162 }
4163 
4164 /* mftb */
4165 static void gen_mftb(DisasContext *ctx)
4166 {
4167     gen_op_mfspr(ctx);
4168 }
4169 
4170 /* mtcrf mtocrf */
4171 static void gen_mtcrf(DisasContext *ctx)
4172 {
4173     uint32_t crm, crn;
4174 
4175     crm = CRM(ctx->opcode);
4176     if (likely((ctx->opcode & 0x00100000))) {
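        /* mtocrf: CRM must select exactly one CR field (single bit set). */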
4177         if (crm && ((crm & (crm - 1)) == 0)) {
4178             TCGv_i32 temp = tcg_temp_new_i32();
4179             crn = ctz32(crm);
4180             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4181             tcg_gen_shri_i32(temp, temp, crn * 4);
4182             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4183         }
4184     } else {
4185         TCGv_i32 temp = tcg_temp_new_i32();
4186         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4187         for (crn = 0 ; crn < 8 ; crn++) {
4188             if (crm & (1 << crn)) {
4189                 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4190                 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4191             }
4192         }
4193     }
4194 }
4195 
4196 /* mtmsr */
4197 #if defined(TARGET_PPC64)
4198 static void gen_mtmsrd(DisasContext *ctx)
4199 {
4200     if (unlikely(!is_book3s_arch2x(ctx))) {
4201         gen_invalid(ctx);
4202         return;
4203     }
4204 
4205     CHK_SV(ctx);
4206 
4207 #if !defined(CONFIG_USER_ONLY)
4208     TCGv t0, t1;
4209     target_ulong mask;
4210 
4211     t0 = tcg_temp_new();
4212     t1 = tcg_temp_new();
4213 
4214     translator_io_start(&ctx->base);
4215 
4216     if (ctx->opcode & 0x00010000) {
4217         /* L=1 form only updates EE and RI */
4218         mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4219     } else {
4220         /* mtmsrd does not alter HV, S, ME, or LE */
4221         mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4222                  (1ULL << MSR_HV));
4223         /*
4224          * XXX: we need to update nip before the store if we enter
4225          *      power saving mode, we will exit the loop directly from
4226          *      ppc_store_msr
4227          */
4228         gen_update_nip(ctx, ctx->base.pc_next);
4229     }
4230 
4231     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4232     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4233     tcg_gen_or_tl(t0, t0, t1);
4234 
4235     gen_helper_store_msr(tcg_env, t0);
4236 
4237     /* Must stop the translation as machine state (may have) changed */
4238     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4239 #endif /* !defined(CONFIG_USER_ONLY) */
4240 }
4241 #endif /* defined(TARGET_PPC64) */
4242 
4243 static void gen_mtmsr(DisasContext *ctx)
4244 {
4245     CHK_SV(ctx);
4246 
4247 #if !defined(CONFIG_USER_ONLY)
4248     TCGv t0, t1;
4249     target_ulong mask = 0xFFFFFFFF;
4250 
4251     t0 = tcg_temp_new();
4252     t1 = tcg_temp_new();
4253 
4254     translator_io_start(&ctx->base);
4255     if (ctx->opcode & 0x00010000) {
4256         /* L=1 form only updates EE and RI */
4257         mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4258     } else {
4259         /* mtmsr does not alter S, ME, or LE */
4260         mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4261 
4262         /*
4263          * XXX: we need to update nip before the store if we enter
4264          *      power saving mode, we will exit the loop directly from
4265          *      ppc_store_msr
4266          */
4267         gen_update_nip(ctx, ctx->base.pc_next);
4268     }
4269 
4270     tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4271     tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4272     tcg_gen_or_tl(t0, t0, t1);
4273 
4274     gen_helper_store_msr(tcg_env, t0);
4275 
4276     /* Must stop the translation as machine state (may have) changed */
4277     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4278 #endif
4279 }
4280 
4281 /* mtspr */
4282 static void gen_mtspr(DisasContext *ctx)
4283 {
4284     void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4285     uint32_t sprn = SPR(ctx->opcode);
4286 
4287 #if defined(CONFIG_USER_ONLY)
4288     write_cb = ctx->spr_cb[sprn].uea_write;
4289 #else
4290     if (ctx->pr) {
4291         write_cb = ctx->spr_cb[sprn].uea_write;
4292     } else if (ctx->hv) {
4293         write_cb = ctx->spr_cb[sprn].hea_write;
4294     } else {
4295         write_cb = ctx->spr_cb[sprn].oea_write;
4296     }
4297 #endif
4298     if (likely(write_cb != NULL)) {
4299         if (likely(write_cb != SPR_NOACCESS)) {
4300             (*write_cb)(ctx, sprn, rS(ctx->opcode));
4301         } else {
4302             /* Privilege exception */
4303             qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4304                           "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4305                           ctx->cia);
4306             gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4307         }
4308     } else {
4309         /* ISA 2.07 defines these as no-ops */
4310         if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4311             (sprn >= 808 && sprn <= 811)) {
4312             /* This is a nop */
4313             return;
4314         }
4315 
4316         /* Not defined */
4317         qemu_log_mask(LOG_GUEST_ERROR,
4318                       "Trying to write invalid spr %d (0x%03x) at "
4319                       TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4320 
4321 
4322         /*
4323          * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4324          * generate a privilege exception, an HV emulation assist or a no-op
4325          */
4326         if (sprn & 0x10) {
4327             if (ctx->pr) {
4328                 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4329             }
4330         } else {
4331             if (ctx->pr || sprn == 0) {
4332                 gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4333             }
4334         }
4335     }
4336 }
4337 
4338 #if defined(TARGET_PPC64)
4339 /* setb */
4340 static void gen_setb(DisasContext *ctx)
4341 {
4342     TCGv_i32 t0 = tcg_temp_new_i32();
4343     TCGv_i32 t8 = tcg_constant_i32(8);
4344     TCGv_i32 tm1 = tcg_constant_i32(-1);
4345     int crf = crfS(ctx->opcode);
4346 
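    /* rD = -1 if CR[crf].LT is set, else 1 if CR[crf].GT is set, else 0. */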
4347     tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4348     tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4349     tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4350 }
4351 #endif
4352 
4353 /***                         Cache management                              ***/
4354 
4355 /* dcbf */
4356 static void gen_dcbf(DisasContext *ctx)
4357 {
4358     /* XXX: specification says this is treated as a load by the MMU */
4359     TCGv t0;
4360     gen_set_access_type(ctx, ACCESS_CACHE);
4361     t0 = tcg_temp_new();
4362     gen_addr_reg_index(ctx, t0);
4363     gen_qemu_ld8u(ctx, t0, t0);
4364 }
4365 
4366 /* dcbfep (external PID dcbf) */
4367 static void gen_dcbfep(DisasContext *ctx)
4368 {
4369     /* XXX: specification says this is treated as a load by the MMU */
4370     TCGv t0;
4371     CHK_SV(ctx);
4372     gen_set_access_type(ctx, ACCESS_CACHE);
4373     t0 = tcg_temp_new();
4374     gen_addr_reg_index(ctx, t0);
4375     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4376 }
4377 
4378 /* dcbi (Supervisor only) */
4379 static void gen_dcbi(DisasContext *ctx)
4380 {
4381 #if defined(CONFIG_USER_ONLY)
4382     GEN_PRIV(ctx);
4383 #else
4384     TCGv EA, val;
4385 
4386     CHK_SV(ctx);
4387     EA = tcg_temp_new();
4388     gen_set_access_type(ctx, ACCESS_CACHE);
4389     gen_addr_reg_index(ctx, EA);
4390     val = tcg_temp_new();
4391     /* XXX: specification says this should be treated as a store by the MMU */
4392     gen_qemu_ld8u(ctx, val, EA);
4393     gen_qemu_st8(ctx, val, EA);
4394 #endif /* defined(CONFIG_USER_ONLY) */
4395 }
4396 
4397 /* dcbst */
4398 static void gen_dcbst(DisasContext *ctx)
4399 {
4400     /* XXX: specification says this is treated as a load by the MMU */
4401     TCGv t0;
4402     gen_set_access_type(ctx, ACCESS_CACHE);
4403     t0 = tcg_temp_new();
4404     gen_addr_reg_index(ctx, t0);
4405     gen_qemu_ld8u(ctx, t0, t0);
4406 }
4407 
4408 /* dcbstep (dcbstep External PID version) */
4409 static void gen_dcbstep(DisasContext *ctx)
4410 {
4411     /* XXX: specification says this is treated as a load by the MMU */
4412     TCGv t0;
4413     gen_set_access_type(ctx, ACCESS_CACHE);
4414     t0 = tcg_temp_new();
4415     gen_addr_reg_index(ctx, t0);
4416     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4417 }
4418 
4419 /* dcbt */
4420 static void gen_dcbt(DisasContext *ctx)
4421 {
4422     /*
4423      * interpreted as no-op
4424      * XXX: specification says this is treated as a load by the MMU but
4425      *      does not generate any exception
4426      */
4427 }
4428 
4429 /* dcbtep */
4430 static void gen_dcbtep(DisasContext *ctx)
4431 {
4432     /*
4433      * interpreted as no-op
4434      * XXX: specification says this is treated as a load by the MMU but
4435      *      does not generate any exception
4436      */
4437 }
4438 
4439 /* dcbtst */
4440 static void gen_dcbtst(DisasContext *ctx)
4441 {
4442     /*
4443      * interpreted as no-op
4444      * XXX: specification says this is treated as a load by the MMU but
4445      *      does not generate any exception
4446      */
4447 }
4448 
4449 /* dcbtstep */
4450 static void gen_dcbtstep(DisasContext *ctx)
4451 {
4452     /*
4453      * interpreted as no-op
4454      * XXX: specification says this is treated as a load by the MMU but
4455      *      does not generate any exception
4456      */
4457 }
4458 
4459 /* dcbtls */
4460 static void gen_dcbtls(DisasContext *ctx)
4461 {
4462     /* Always fails locking the cache */
4463     TCGv t0 = tcg_temp_new();
4464     gen_load_spr(t0, SPR_Exxx_L1CSR0);
4465     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4466     gen_store_spr(SPR_Exxx_L1CSR0, t0);
4467 }
4468 
4469 /* dcblc */
4470 static void gen_dcblc(DisasContext *ctx)
4471 {
4472     /*
4473      * interpreted as no-op
4474      */
4475 }
4476 
4477 /* dcbz */
4478 static void gen_dcbz(DisasContext *ctx)
4479 {
4480     TCGv tcgv_addr = tcg_temp_new();
4481 
4482     gen_set_access_type(ctx, ACCESS_CACHE);
4483     gen_addr_reg_index(ctx, tcgv_addr);
4484 
4485 #ifdef TARGET_PPC64
4486     if (ctx->excp_model == POWERPC_EXCP_970 && !(ctx->opcode & 0x00200000)) {
4487         gen_helper_dcbzl(tcg_env, tcgv_addr);
4488         return;
4489     }
4490 #endif
4491 
4492     gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(ctx->mem_idx));
4493 }
4494 
4495 /* dcbzep */
4496 static void gen_dcbzep(DisasContext *ctx)
4497 {
4498     TCGv tcgv_addr = tcg_temp_new();
4499 
4500     gen_set_access_type(ctx, ACCESS_CACHE);
4501     gen_addr_reg_index(ctx, tcgv_addr);
4502     gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(PPC_TLB_EPID_STORE));
4503 }
4504 
4505 /* dst / dstt */
4506 static void gen_dst(DisasContext *ctx)
4507 {
4508     if (rA(ctx->opcode) == 0) {
4509         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4510     } else {
4511         /* interpreted as no-op */
4512     }
4513 }
4514 
4515 /* dstst / dststt */
4516 static void gen_dstst(DisasContext *ctx)
4517 {
4518     if (rA(ctx->opcode) == 0) {
4519         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4520     } else {
4521         /* interpreted as no-op */
4522     }
4523 
4524 }
4525 
4526 /* dss / dssall */
4527 static void gen_dss(DisasContext *ctx)
4528 {
4529     /* interpreted as no-op */
4530 }
4531 
4532 /* icbi */
4533 static void gen_icbi(DisasContext *ctx)
4534 {
4535     TCGv t0;
4536     gen_set_access_type(ctx, ACCESS_CACHE);
4537     t0 = tcg_temp_new();
4538     gen_addr_reg_index(ctx, t0);
4539     gen_helper_icbi(tcg_env, t0);
4540 }
4541 
4542 /* icbiep */
4543 static void gen_icbiep(DisasContext *ctx)
4544 {
4545     TCGv t0;
4546     gen_set_access_type(ctx, ACCESS_CACHE);
4547     t0 = tcg_temp_new();
4548     gen_addr_reg_index(ctx, t0);
4549     gen_helper_icbiep(tcg_env, t0);
4550 }
4551 
4552 /* Optional: */
4553 /* dcba */
4554 static void gen_dcba(DisasContext *ctx)
4555 {
4556     /*
4557      * interpreted as no-op
4558      * XXX: specification says this is treated as a store by the MMU
4559      *      but does not generate any exception
4560      */
4561 }
4562 
4563 /***                    Segment register manipulation                      ***/
4564 /* Supervisor only: */
4565 
4566 /* mfsr */
4567 static void gen_mfsr(DisasContext *ctx)
4568 {
4569 #if defined(CONFIG_USER_ONLY)
4570     GEN_PRIV(ctx);
4571 #else
4572     TCGv t0;
4573 
4574     CHK_SV(ctx);
4575     t0 = tcg_constant_tl(SR(ctx->opcode));
4576     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4577 #endif /* defined(CONFIG_USER_ONLY) */
4578 }
4579 
4580 /* mfsrin */
4581 static void gen_mfsrin(DisasContext *ctx)
4582 {
4583 #if defined(CONFIG_USER_ONLY)
4584     GEN_PRIV(ctx);
4585 #else
4586     TCGv t0;
4587 
4588     CHK_SV(ctx);
4589     t0 = tcg_temp_new();
4590     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4591     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4592 #endif /* defined(CONFIG_USER_ONLY) */
4593 }
4594 
4595 /* mtsr */
4596 static void gen_mtsr(DisasContext *ctx)
4597 {
4598 #if defined(CONFIG_USER_ONLY)
4599     GEN_PRIV(ctx);
4600 #else
4601     TCGv t0;
4602 
4603     CHK_SV(ctx);
4604     t0 = tcg_constant_tl(SR(ctx->opcode));
4605     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4606 #endif /* defined(CONFIG_USER_ONLY) */
4607 }
4608 
4609 /* mtsrin */
4610 static void gen_mtsrin(DisasContext *ctx)
4611 {
4612 #if defined(CONFIG_USER_ONLY)
4613     GEN_PRIV(ctx);
4614 #else
4615     TCGv t0;
4616     CHK_SV(ctx);
4617 
4618     t0 = tcg_temp_new();
4619     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4620     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rD(ctx->opcode)]);
4621 #endif /* defined(CONFIG_USER_ONLY) */
4622 }
4623 
4624 #if defined(TARGET_PPC64)
4625 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4626 
4627 /* mfsr */
4628 static void gen_mfsr_64b(DisasContext *ctx)
4629 {
4630 #if defined(CONFIG_USER_ONLY)
4631     GEN_PRIV(ctx);
4632 #else
4633     TCGv t0;
4634 
4635     CHK_SV(ctx);
4636     t0 = tcg_constant_tl(SR(ctx->opcode));
4637     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4638 #endif /* defined(CONFIG_USER_ONLY) */
4639 }
4640 
4641 /* mfsrin */
4642 static void gen_mfsrin_64b(DisasContext *ctx)
4643 {
4644 #if defined(CONFIG_USER_ONLY)
4645     GEN_PRIV(ctx);
4646 #else
4647     TCGv t0;
4648 
4649     CHK_SV(ctx);
4650     t0 = tcg_temp_new();
4651     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4652     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4653 #endif /* defined(CONFIG_USER_ONLY) */
4654 }
4655 
4656 /* mtsr */
4657 static void gen_mtsr_64b(DisasContext *ctx)
4658 {
4659 #if defined(CONFIG_USER_ONLY)
4660     GEN_PRIV(ctx);
4661 #else
4662     TCGv t0;
4663 
4664     CHK_SV(ctx);
4665     t0 = tcg_constant_tl(SR(ctx->opcode));
4666     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4667 #endif /* defined(CONFIG_USER_ONLY) */
4668 }
4669 
4670 /* mtsrin */
4671 static void gen_mtsrin_64b(DisasContext *ctx)
4672 {
4673 #if defined(CONFIG_USER_ONLY)
4674     GEN_PRIV(ctx);
4675 #else
4676     TCGv t0;
4677 
4678     CHK_SV(ctx);
4679     t0 = tcg_temp_new();
4680     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4681     gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4682 #endif /* defined(CONFIG_USER_ONLY) */
4683 }
4684 
4685 #endif /* defined(TARGET_PPC64) */
4686 
4687 /***                      Lookaside buffer management                      ***/
4688 /* Optional & supervisor only: */
4689 
4690 /* tlbia */
4691 static void gen_tlbia(DisasContext *ctx)
4692 {
4693 #if defined(CONFIG_USER_ONLY)
4694     GEN_PRIV(ctx);
4695 #else
4696     CHK_HV(ctx);
4697 
4698     gen_helper_tlbia(tcg_env);
4699 #endif  /* defined(CONFIG_USER_ONLY) */
4700 }
4701 
4702 /* tlbsync */
4703 static void gen_tlbsync(DisasContext *ctx)
4704 {
4705 #if defined(CONFIG_USER_ONLY)
4706     GEN_PRIV(ctx);
4707 #else
4708 
4709     if (ctx->gtse) {
4710         CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
4711     } else {
4712         CHK_HV(ctx); /* Else hypervisor privileged */
4713     }
4714 
4715     /* BookS does both ptesync and tlbsync, making tlbsync a nop for server */
4716     if (ctx->insns_flags & PPC_BOOKE) {
4717         gen_check_tlb_flush(ctx, true);
4718     }
4719 #endif /* defined(CONFIG_USER_ONLY) */
4720 }
4721 
4722 /***                              External control                         ***/
4723 /* Optional: */
4724 
4725 /* eciwx */
4726 static void gen_eciwx(DisasContext *ctx)
4727 {
4728     TCGv t0;
4729     /* Should check EAR[E] ! */
4730     gen_set_access_type(ctx, ACCESS_EXT);
4731     t0 = tcg_temp_new();
4732     gen_addr_reg_index(ctx, t0);
4733     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4734                        DEF_MEMOP(MO_UL | MO_ALIGN));
4735 }
4736 
4737 /* ecowx */
4738 static void gen_ecowx(DisasContext *ctx)
4739 {
4740     TCGv t0;
4741     /* Should check EAR[E] ! */
4742     gen_set_access_type(ctx, ACCESS_EXT);
4743     t0 = tcg_temp_new();
4744     gen_addr_reg_index(ctx, t0);
4745     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4746                        DEF_MEMOP(MO_UL | MO_ALIGN));
4747 }
4748 
4749 /* 602 - 603 - G2 TLB management */
4750 
4751 /* tlbld */
4752 static void gen_tlbld_6xx(DisasContext *ctx)
4753 {
4754 #if defined(CONFIG_USER_ONLY)
4755     GEN_PRIV(ctx);
4756 #else
4757     CHK_SV(ctx);
4758     gen_helper_6xx_tlbd(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4759 #endif /* defined(CONFIG_USER_ONLY) */
4760 }
4761 
4762 /* tlbli */
4763 static void gen_tlbli_6xx(DisasContext *ctx)
4764 {
4765 #if defined(CONFIG_USER_ONLY)
4766     GEN_PRIV(ctx);
4767 #else
4768     CHK_SV(ctx);
4769     gen_helper_6xx_tlbi(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4770 #endif /* defined(CONFIG_USER_ONLY) */
4771 }
4772 
4773 /* BookE specific instructions */
4774 
4775 /* XXX: not implemented on 440 ? */
4776 static void gen_mfapidi(DisasContext *ctx)
4777 {
4778     /* XXX: TODO */
4779     gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4780 }
4781 
4782 /* XXX: not implemented on 440 ? */
4783 static void gen_tlbiva(DisasContext *ctx)
4784 {
4785 #if defined(CONFIG_USER_ONLY)
4786     GEN_PRIV(ctx);
4787 #else
4788     TCGv t0;
4789 
4790     CHK_SV(ctx);
4791     t0 = tcg_temp_new();
4792     gen_addr_reg_index(ctx, t0);
4793     gen_helper_tlbiva(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4794 #endif /* defined(CONFIG_USER_ONLY) */
4795 }
4796 
4797 /* All 405 MAC instructions are translated here */
4798 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
4799                                         int ra, int rb, int rt, int Rc)
4800 {
4801     TCGv t0, t1;
4802 
4803     t0 = tcg_temp_new();
4804     t1 = tcg_temp_new();
4805 
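    /*
     * The low bit of opc3 selects signed vs. unsigned operands and bits
     * 2-3 select which 16-bit halves of rA and rB are multiplied; opc2
     * selects plain multiply, multiply-accumulate or negative
     * multiply-accumulate.
     */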
4806     switch (opc3 & 0x0D) {
4807     case 0x05:
4808         /* macchw    - macchw.    - macchwo   - macchwo.   */
4809         /* macchws   - macchws.   - macchwso  - macchwso.  */
4810         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
4811         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
4812         /* mulchw - mulchw. */
4813         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4814         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4815         tcg_gen_ext16s_tl(t1, t1);
4816         break;
4817     case 0x04:
4818         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
4819         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
4820         /* mulchwu - mulchwu. */
4821         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4822         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4823         tcg_gen_ext16u_tl(t1, t1);
4824         break;
4825     case 0x01:
4826         /* machhw    - machhw.    - machhwo   - machhwo.   */
4827         /* machhws   - machhws.   - machhwso  - machhwso.  */
4828         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
4829         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
4830         /* mulhhw - mulhhw. */
4831         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
4832         tcg_gen_ext16s_tl(t0, t0);
4833         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4834         tcg_gen_ext16s_tl(t1, t1);
4835         break;
4836     case 0x00:
4837         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
4838         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
4839         /* mulhhwu - mulhhwu. */
4840         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
4841         tcg_gen_ext16u_tl(t0, t0);
4842         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4843         tcg_gen_ext16u_tl(t1, t1);
4844         break;
4845     case 0x0D:
4846         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
4847         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
4848         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
4849         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
4850         /* mullhw - mullhw. */
4851         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4852         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
4853         break;
4854     case 0x0C:
4855         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
4856         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
4857         /* mullhwu - mullhwu. */
4858         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4859         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
4860         break;
4861     }
4862     if (opc2 & 0x04) {
4863         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
4864         tcg_gen_mul_tl(t1, t0, t1);
4865         if (opc2 & 0x02) {
4866             /* nmultiply-and-accumulate (0x0E) */
4867             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
4868         } else {
4869             /* multiply-and-accumulate (0x0C) */
4870             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
4871         }
4872 
4873         if (opc3 & 0x12) {
4874             /* Check overflow and/or saturate */
4875             TCGLabel *l1 = gen_new_label();
4876 
4877             if (opc3 & 0x10) {
4878                 /* Start with XER OV disabled, the most likely case */
4879                 tcg_gen_movi_tl(cpu_ov, 0);
4880             }
4881             if (opc3 & 0x01) {
4882                 /* Signed */
4883                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
4884                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
4885                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
4886                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
4887                 if (opc3 & 0x02) {
4888                     /* Saturate */
4889                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
4890                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
4891                 }
4892             } else {
4893                 /* Unsigned */
4894                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
4895                 if (opc3 & 0x02) {
4896                     /* Saturate */
4897                     tcg_gen_movi_tl(t0, UINT32_MAX);
4898                 }
4899             }
4900             if (opc3 & 0x10) {
4901                 /* Check overflow */
4902                 tcg_gen_movi_tl(cpu_ov, 1);
4903                 tcg_gen_movi_tl(cpu_so, 1);
4904             }
4905             gen_set_label(l1);
4906             tcg_gen_mov_tl(cpu_gpr[rt], t0);
4907         }
4908     } else {
4909         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
4910     }
4911     if (unlikely(Rc != 0)) {
4912         /* Update Rc0 */
4913         gen_set_Rc0(ctx, cpu_gpr[rt]);
4914     }
4915 }
4916 
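/*
 * For the 405 MAC handlers below, opc2 selects the operation class
 * (0x08: multiply only, 0x0C: multiply-accumulate, 0x0E: negative
 * multiply-accumulate) and opc3 encodes the variant: bits 2-3 choose the
 * operand halfwords (high/high, cross or low/low), bit 0 selects the signed
 * forms, bit 1 the saturating forms and bit 4 the XER[OV]-updating "o"
 * forms, matching the decoding in gen_405_mulladd_insn() above.
 */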
4917 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
4918 static void glue(gen_, name)(DisasContext *ctx)                               \
4919 {                                                                             \
4920     gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
4921                          rD(ctx->opcode), Rc(ctx->opcode));                   \
4922 }
4923 
4924 /* macchw    - macchw.    */
4925 GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
4926 /* macchwo   - macchwo.   */
4927 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
4928 /* macchws   - macchws.   */
4929 GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
4930 /* macchwso  - macchwso.  */
4931 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
4932 /* macchwsu  - macchwsu.  */
4933 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
4934 /* macchwsuo - macchwsuo. */
4935 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
4936 /* macchwu   - macchwu.   */
4937 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
4938 /* macchwuo  - macchwuo.  */
4939 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
4940 /* machhw    - machhw.    */
4941 GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
4942 /* machhwo   - machhwo.   */
4943 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
4944 /* machhws   - machhws.   */
4945 GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
4946 /* machhwso  - machhwso.  */
4947 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
4948 /* machhwsu  - machhwsu.  */
4949 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
4950 /* machhwsuo - machhwsuo. */
4951 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
4952 /* machhwu   - machhwu.   */
4953 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
4954 /* machhwuo  - machhwuo.  */
4955 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
4956 /* maclhw    - maclhw.    */
4957 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
4958 /* maclhwo   - maclhwo.   */
4959 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
4960 /* maclhws   - maclhws.   */
4961 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
4962 /* maclhwso  - maclhwso.  */
4963 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
4964 /* maclhwu   - maclhwu.   */
4965 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
4966 /* maclhwuo  - maclhwuo.  */
4967 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
4968 /* maclhwsu  - maclhwsu.  */
4969 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
4970 /* maclhwsuo - maclhwsuo. */
4971 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
4972 /* nmacchw   - nmacchw.   */
4973 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
4974 /* nmacchwo  - nmacchwo.  */
4975 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
4976 /* nmacchws  - nmacchws.  */
4977 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
4978 /* nmacchwso - nmacchwso. */
4979 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
4980 /* nmachhw   - nmachhw.   */
4981 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
4982 /* nmachhwo  - nmachhwo.  */
4983 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
4984 /* nmachhws  - nmachhws.  */
4985 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
4986 /* nmachhwso - nmachhwso. */
4987 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
4988 /* nmaclhw   - nmaclhw.   */
4989 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
4990 /* nmaclhwo  - nmaclhwo.  */
4991 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
4992 /* nmaclhws  - nmaclhws.  */
4993 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
4994 /* nmaclhwso - nmaclhwso. */
4995 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
4996 
4997 /* mulchw  - mulchw.  */
4998 GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
4999 /* mulchwu - mulchwu. */
5000 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
5001 /* mulhhw  - mulhhw.  */
5002 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
5003 /* mulhhwu - mulhhwu. */
5004 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
5005 /* mullhw  - mullhw.  */
5006 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
5007 /* mullhwu - mullhwu. */
5008 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5009 
5010 /* mfdcr */
5011 static void gen_mfdcr(DisasContext *ctx)
5012 {
5013 #if defined(CONFIG_USER_ONLY)
5014     GEN_PRIV(ctx);
5015 #else
5016     TCGv dcrn;
5017 
5018     CHK_SV(ctx);
5019     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5020     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env, dcrn);
5021 #endif /* defined(CONFIG_USER_ONLY) */
5022 }
5023 
5024 /* mtdcr */
5025 static void gen_mtdcr(DisasContext *ctx)
5026 {
5027 #if defined(CONFIG_USER_ONLY)
5028     GEN_PRIV(ctx);
5029 #else
5030     TCGv dcrn;
5031 
5032     CHK_SV(ctx);
5033     dcrn = tcg_constant_tl(SPR(ctx->opcode));
5034     gen_helper_store_dcr(tcg_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5035 #endif /* defined(CONFIG_USER_ONLY) */
5036 }
5037 
5038 /* mfdcrx */
5039 /* XXX: not implemented on 440? */
5040 static void gen_mfdcrx(DisasContext *ctx)
5041 {
5042 #if defined(CONFIG_USER_ONLY)
5043     GEN_PRIV(ctx);
5044 #else
5045     CHK_SV(ctx);
5046     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env,
5047                         cpu_gpr[rA(ctx->opcode)]);
5048     /* Note: if the Rc bit is set, the contents of CR0 are undefined */
5049 #endif /* defined(CONFIG_USER_ONLY) */
5050 }
5051 
5052 /* mtdcrx */
5053 /* XXX: not implemented on 440? */
5054 static void gen_mtdcrx(DisasContext *ctx)
5055 {
5056 #if defined(CONFIG_USER_ONLY)
5057     GEN_PRIV(ctx);
5058 #else
5059     CHK_SV(ctx);
5060     gen_helper_store_dcr(tcg_env, cpu_gpr[rA(ctx->opcode)],
5061                          cpu_gpr[rS(ctx->opcode)]);
5062     /* Note: if the Rc bit is set, the contents of CR0 are undefined */
5063 #endif /* defined(CONFIG_USER_ONLY) */
5064 }
5065 
5066 /* dccci */
5067 static void gen_dccci(DisasContext *ctx)
5068 {
5069     CHK_SV(ctx);
5070     /* interpreted as no-op */
5071 }
5072 
5073 /* dcread */
5074 static void gen_dcread(DisasContext *ctx)
5075 {
5076 #if defined(CONFIG_USER_ONLY)
5077     GEN_PRIV(ctx);
5078 #else
5079     TCGv EA, val;
5080 
5081     CHK_SV(ctx);
5082     gen_set_access_type(ctx, ACCESS_CACHE);
5083     EA = tcg_temp_new();
5084     gen_addr_reg_index(ctx, EA);
5085     val = tcg_temp_new();
5086     gen_qemu_ld32u(ctx, val, EA);
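    /*
     * The loaded value is used only for its access side effects; the data
     * cache contents are not modelled, so the EA is returned instead.
     */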
5087     tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5088 #endif /* defined(CONFIG_USER_ONLY) */
5089 }
5090 
5091 /* icbt */
5092 static void gen_icbt_40x(DisasContext *ctx)
5093 {
5094     /*
5095      * interpreted as no-op
5096      * XXX: the specification says this is treated as a load by the MMU
5097      *      but does not generate any exception
5098      */
5099 }
5100 
5101 /* iccci */
5102 static void gen_iccci(DisasContext *ctx)
5103 {
5104     CHK_SV(ctx);
5105     /* interpreted as no-op */
5106 }
5107 
5108 /* icread */
5109 static void gen_icread(DisasContext *ctx)
5110 {
5111     CHK_SV(ctx);
5112     /* interpreted as no-op */
5113 }
5114 
5115 /* rfci (supervisor only) */
5116 static void gen_rfci_40x(DisasContext *ctx)
5117 {
5118 #if defined(CONFIG_USER_ONLY)
5119     GEN_PRIV(ctx);
5120 #else
5121     CHK_SV(ctx);
5122     /* Restore CPU state */
5123     gen_helper_40x_rfci(tcg_env);
5124     ctx->base.is_jmp = DISAS_EXIT;
5125 #endif /* defined(CONFIG_USER_ONLY) */
5126 }
5127 
5128 static void gen_rfci(DisasContext *ctx)
5129 {
5130 #if defined(CONFIG_USER_ONLY)
5131     GEN_PRIV(ctx);
5132 #else
5133     CHK_SV(ctx);
5134     /* Restore CPU state */
5135     gen_helper_rfci(tcg_env);
5136     ctx->base.is_jmp = DISAS_EXIT;
5137 #endif /* defined(CONFIG_USER_ONLY) */
5138 }
5139 
5140 /* BookE specific */
5141 
5142 /* XXX: not implemented on 440? */
5143 static void gen_rfdi(DisasContext *ctx)
5144 {
5145 #if defined(CONFIG_USER_ONLY)
5146     GEN_PRIV(ctx);
5147 #else
5148     CHK_SV(ctx);
5149     /* Restore CPU state */
5150     gen_helper_rfdi(tcg_env);
5151     ctx->base.is_jmp = DISAS_EXIT;
5152 #endif /* defined(CONFIG_USER_ONLY) */
5153 }
5154 
5155 /* XXX: not implemented on 440? */
5156 static void gen_rfmci(DisasContext *ctx)
5157 {
5158 #if defined(CONFIG_USER_ONLY)
5159     GEN_PRIV(ctx);
5160 #else
5161     CHK_SV(ctx);
5162     /* Restore CPU state */
5163     gen_helper_rfmci(tcg_env);
5164     ctx->base.is_jmp = DISAS_EXIT;
5165 #endif /* defined(CONFIG_USER_ONLY) */
5166 }
5167 
5168 /* TLB management - PowerPC 405 implementation */
5169 
5170 /* tlbre */
5171 static void gen_tlbre_40x(DisasContext *ctx)
5172 {
5173 #if defined(CONFIG_USER_ONLY)
5174     GEN_PRIV(ctx);
5175 #else
5176     CHK_SV(ctx);
5177     switch (rB(ctx->opcode)) {
5178     case 0:
5179         gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], tcg_env,
5180                                 cpu_gpr[rA(ctx->opcode)]);
5181         break;
5182     case 1:
5183         gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], tcg_env,
5184                                 cpu_gpr[rA(ctx->opcode)]);
5185         break;
5186     default:
5187         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5188         break;
5189     }
5190 #endif /* defined(CONFIG_USER_ONLY) */
5191 }
5192 
5193 /* tlbsx - tlbsx. */
5194 static void gen_tlbsx_40x(DisasContext *ctx)
5195 {
5196 #if defined(CONFIG_USER_ONLY)
5197     GEN_PRIV(ctx);
5198 #else
5199     TCGv t0;
5200 
5201     CHK_SV(ctx);
5202     t0 = tcg_temp_new();
5203     gen_addr_reg_index(ctx, t0);
5204     gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
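    /*
     * For the Rc=1 form, CR0 = 0b00 || found || XER[SO]: EQ is set when the
     * helper returned something other than -1, i.e. a matching entry exists.
     */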
5205     if (Rc(ctx->opcode)) {
5206         TCGLabel *l1 = gen_new_label();
5207         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5208         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5209         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5210         gen_set_label(l1);
5211     }
5212 #endif /* defined(CONFIG_USER_ONLY) */
5213 }
5214 
5215 /* tlbwe */
5216 static void gen_tlbwe_40x(DisasContext *ctx)
5217 {
5218 #if defined(CONFIG_USER_ONLY)
5219     GEN_PRIV(ctx);
5220 #else
5221     CHK_SV(ctx);
5222 
5223     switch (rB(ctx->opcode)) {
5224     case 0:
5225         gen_helper_4xx_tlbwe_hi(tcg_env, cpu_gpr[rA(ctx->opcode)],
5226                                 cpu_gpr[rS(ctx->opcode)]);
5227         break;
5228     case 1:
5229         gen_helper_4xx_tlbwe_lo(tcg_env, cpu_gpr[rA(ctx->opcode)],
5230                                 cpu_gpr[rS(ctx->opcode)]);
5231         break;
5232     default:
5233         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5234         break;
5235     }
5236 #endif /* defined(CONFIG_USER_ONLY) */
5237 }
5238 
5239 /* TLB management - PowerPC 440 implementation */
5240 
5241 /* tlbre */
5242 static void gen_tlbre_440(DisasContext *ctx)
5243 {
5244 #if defined(CONFIG_USER_ONLY)
5245     GEN_PRIV(ctx);
5246 #else
5247     CHK_SV(ctx);
5248 
5249     switch (rB(ctx->opcode)) {
5250     case 0:
5251     case 1:
5252     case 2:
5253         {
5254             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5255             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], tcg_env,
5256                                  t0, cpu_gpr[rA(ctx->opcode)]);
5257         }
5258         break;
5259     default:
5260         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5261         break;
5262     }
5263 #endif /* defined(CONFIG_USER_ONLY) */
5264 }
5265 
5266 /* tlbsx - tlbsx. */
5267 static void gen_tlbsx_440(DisasContext *ctx)
5268 {
5269 #if defined(CONFIG_USER_ONLY)
5270     GEN_PRIV(ctx);
5271 #else
5272     TCGv t0;
5273 
5274     CHK_SV(ctx);
5275     t0 = tcg_temp_new();
5276     gen_addr_reg_index(ctx, t0);
5277     gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
5278     if (Rc(ctx->opcode)) {
5279         TCGLabel *l1 = gen_new_label();
5280         tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5281         tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5282         tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5283         gen_set_label(l1);
5284     }
5285 #endif /* defined(CONFIG_USER_ONLY) */
5286 }
5287 
5288 /* tlbwe */
5289 static void gen_tlbwe_440(DisasContext *ctx)
5290 {
5291 #if defined(CONFIG_USER_ONLY)
5292     GEN_PRIV(ctx);
5293 #else
5294     CHK_SV(ctx);
5295     switch (rB(ctx->opcode)) {
5296     case 0:
5297     case 1:
5298     case 2:
5299         {
5300             TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5301             gen_helper_440_tlbwe(tcg_env, t0, cpu_gpr[rA(ctx->opcode)],
5302                                  cpu_gpr[rS(ctx->opcode)]);
5303         }
5304         break;
5305     default:
5306         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5307         break;
5308     }
5309 #endif /* defined(CONFIG_USER_ONLY) */
5310 }
5311 
5312 /* TLB management - PowerPC BookE 2.06 implementation */
5313 
5314 /* tlbre */
5315 static void gen_tlbre_booke206(DisasContext *ctx)
5316 {
5317 #if defined(CONFIG_USER_ONLY)
5318     GEN_PRIV(ctx);
5319 #else
5320     CHK_SV(ctx);
5321     gen_helper_booke206_tlbre(tcg_env);
5322 #endif /* defined(CONFIG_USER_ONLY) */
5323 }
5324 
5325 /* tlbsx - tlbsx. */
5326 static void gen_tlbsx_booke206(DisasContext *ctx)
5327 {
5328 #if defined(CONFIG_USER_ONLY)
5329     GEN_PRIV(ctx);
5330 #else
5331     TCGv t0;
5332 
5333     CHK_SV(ctx);
5334     if (rA(ctx->opcode)) {
5335         t0 = tcg_temp_new();
5336         tcg_gen_add_tl(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5337     } else {
5338         t0 = cpu_gpr[rB(ctx->opcode)];
5339     }
5340     gen_helper_booke206_tlbsx(tcg_env, t0);
5341 #endif /* defined(CONFIG_USER_ONLY) */
5342 }
5343 
5344 /* tlbwe */
5345 static void gen_tlbwe_booke206(DisasContext *ctx)
5346 {
5347 #if defined(CONFIG_USER_ONLY)
5348     GEN_PRIV(ctx);
5349 #else
5350     CHK_SV(ctx);
5351     gen_helper_booke206_tlbwe(tcg_env);
5352 #endif /* defined(CONFIG_USER_ONLY) */
5353 }
5354 
5355 static void gen_tlbivax_booke206(DisasContext *ctx)
5356 {
5357 #if defined(CONFIG_USER_ONLY)
5358     GEN_PRIV(ctx);
5359 #else
5360     TCGv t0;
5361 
5362     CHK_SV(ctx);
5363     t0 = tcg_temp_new();
5364     gen_addr_reg_index(ctx, t0);
5365     gen_helper_booke206_tlbivax(tcg_env, t0);
5366 #endif /* defined(CONFIG_USER_ONLY) */
5367 }
5368 
5369 static void gen_tlbilx_booke206(DisasContext *ctx)
5370 {
5371 #if defined(CONFIG_USER_ONLY)
5372     GEN_PRIV(ctx);
5373 #else
5374     TCGv t0;
5375 
5376     CHK_SV(ctx);
5377     t0 = tcg_temp_new();
5378     gen_addr_reg_index(ctx, t0);
5379 
5380     switch ((ctx->opcode >> 21) & 0x3) {
5381     case 0:
5382         gen_helper_booke206_tlbilx0(tcg_env, t0);
5383         break;
5384     case 1:
5385         gen_helper_booke206_tlbilx1(tcg_env, t0);
5386         break;
5387     case 3:
5388         gen_helper_booke206_tlbilx3(tcg_env, t0);
5389         break;
5390     default:
5391         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5392         break;
5393     }
5394 #endif /* defined(CONFIG_USER_ONLY) */
5395 }
5396 
5397 /* wrtee */
5398 static void gen_wrtee(DisasContext *ctx)
5399 {
5400 #if defined(CONFIG_USER_ONLY)
5401     GEN_PRIV(ctx);
5402 #else
5403     TCGv t0;
5404 
5405     CHK_SV(ctx);
5406     t0 = tcg_temp_new();
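    /* Copy just the EE bit from the source GPR into MSR, keeping the rest. */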
5407     tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5408     tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5409     tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5410     gen_ppc_maybe_interrupt(ctx);
5411     /*
5412      * Stop translation to have a chance to raise an exception if we
5413      * just set msr_ee to 1
5414      */
5415     ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5416 #endif /* defined(CONFIG_USER_ONLY) */
5417 }
5418 
5419 /* wrteei */
5420 static void gen_wrteei(DisasContext *ctx)
5421 {
5422 #if defined(CONFIG_USER_ONLY)
5423     GEN_PRIV(ctx);
5424 #else
5425     CHK_SV(ctx);
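    /* The E field (instruction bit 0x00008000) supplies the new MSR[EE]. */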
5426     if (ctx->opcode & 0x00008000) {
5427         tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
5428         gen_ppc_maybe_interrupt(ctx);
5429         /* Stop translation to have a chance to raise an exception */
5430         ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5431     } else {
5432         tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5433     }
5434 #endif /* defined(CONFIG_USER_ONLY) */
5435 }
5436 
5437 /* PowerPC 440 specific instructions */
5438 
5439 /* dlmzb */
5440 static void gen_dlmzb(DisasContext *ctx)
5441 {
5442     TCGv_i32 t0 = tcg_constant_i32(Rc(ctx->opcode));
5443     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], tcg_env,
5444                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
5445 }
5446 
5447 /* icbt */
5448 static void gen_icbt_440(DisasContext *ctx)
5449 {
5450     /*
5451      * interpreted as no-op
5452      * XXX: the specification says this is treated as a load by the MMU
5453      *      but does not generate any exception
5454      */
5455 }
5456 
5457 static void gen_tbegin(DisasContext *ctx)
5458 {
5459     if (unlikely(!ctx->tm_enabled)) {
5460         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5461         return;
5462     }
5463     gen_helper_tbegin(tcg_env);
5464 }
5465 
5466 #define GEN_TM_NOOP(name)                                      \
5467 static inline void gen_##name(DisasContext *ctx)               \
5468 {                                                              \
5469     if (unlikely(!ctx->tm_enabled)) {                          \
5470         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5471         return;                                                \
5472     }                                                          \
5473     /*                                                         \
5474      * Because tbegin always fails in QEMU, these user         \
5475      * space instructions all have a simple implementation:    \
5476      *                                                         \
5477      *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
5478      *           = 0b0 || 0b00    || 0b0                       \
5479      */                                                        \
5480     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5481 }
5482 
5483 GEN_TM_NOOP(tend);
5484 GEN_TM_NOOP(tabort);
5485 GEN_TM_NOOP(tabortwc);
5486 GEN_TM_NOOP(tabortwci);
5487 GEN_TM_NOOP(tabortdc);
5488 GEN_TM_NOOP(tabortdci);
5489 GEN_TM_NOOP(tsr);
5490 
5491 static inline void gen_cp_abort(DisasContext *ctx)
5492 {
5493     /* Do Nothing */
5494 }
5495 
5496 #define GEN_CP_PASTE_NOOP(name)                           \
5497 static inline void gen_##name(DisasContext *ctx)          \
5498 {                                                         \
5499     /*                                                    \
5500      * Generate invalid exception until we have an        \
5501      * implementation of the copy paste facility          \
5502      */                                                   \
5503     gen_invalid(ctx);                                     \
5504 }
5505 
5506 GEN_CP_PASTE_NOOP(copy)
5507 GEN_CP_PASTE_NOOP(paste)
5508 
5509 static void gen_tcheck(DisasContext *ctx)
5510 {
5511     if (unlikely(!ctx->tm_enabled)) {
5512         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5513         return;
5514     }
5515     /*
5516      * Because tbegin always fails, the tcheck implementation is
5517      * simple:
5518      *
5519      * CR[CRF] = TDOOMED || MSR[TS] || 0b0
5520      *         = 0b1 || 0b00 || 0b0
5521      */
5522     tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
5523 }
5524 
5525 #if defined(CONFIG_USER_ONLY)
5526 #define GEN_TM_PRIV_NOOP(name)                                 \
5527 static inline void gen_##name(DisasContext *ctx)               \
5528 {                                                              \
5529     gen_priv_opc(ctx);                                         \
5530 }
5531 
5532 #else
5533 
5534 #define GEN_TM_PRIV_NOOP(name)                                 \
5535 static inline void gen_##name(DisasContext *ctx)               \
5536 {                                                              \
5537     CHK_SV(ctx);                                               \
5538     if (unlikely(!ctx->tm_enabled)) {                          \
5539         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5540         return;                                                \
5541     }                                                          \
5542     /*                                                         \
5543      * Because tbegin always fails, the implementation is      \
5544      * simple:                                                 \
5545      *                                                         \
5546      *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
5547      *         = 0b0 || 0b00 | 0b0                             \
5548      */                                                        \
5549     tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5550 }
5551 
5552 #endif
5553 
5554 GEN_TM_PRIV_NOOP(treclaim);
5555 GEN_TM_PRIV_NOOP(trechkpt);
5556 
5557 static inline void get_fpr(TCGv_i64 dst, int regno)
5558 {
5559     tcg_gen_ld_i64(dst, tcg_env, fpr_offset(regno));
5560 }
5561 
5562 static inline void set_fpr(int regno, TCGv_i64 src)
5563 {
5564     tcg_gen_st_i64(src, tcg_env, fpr_offset(regno));
5565     /*
5566      * Before PowerISA v3.1 the result of doubleword 1 of the VSR
5567      * corresponding to the target FPR was undefined. However,
5568      * most (if not all) real hardware was setting the result to 0.
5569      * Starting at ISA v3.1, the result for doubleword 1 is now defined
5570      * to be 0.
5571      */
5572     tcg_gen_st_i64(tcg_constant_i64(0), tcg_env, vsr64_offset(regno, false));
5573 }
5574 
5575 /*
5576  * Helpers for decodetree used by !function for decoding arguments.
5577  */
5578 static int times_2(DisasContext *ctx, int x)
5579 {
5580     return x * 2;
5581 }
5582 
5583 static int times_4(DisasContext *ctx, int x)
5584 {
5585     return x * 4;
5586 }
5587 
5588 static int times_16(DisasContext *ctx, int x)
5589 {
5590     return x * 16;
5591 }
5592 
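/*
 * Compose a sign-extended, doubleword-aligned displacement: the 6-bit field
 * is deposited at bits 3..8 of -512 (0xfffffffffffffe00), giving a multiple
 * of 8 in the range [-512, -8].
 */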
5593 static int64_t dw_compose_ea(DisasContext *ctx, int x)
5594 {
5595     return deposit64(0xfffffffffffffe00, 3, 6, x);
5596 }
5597 
5598 /*
5599  * Helpers for trans_* functions to check for specific insns flags.
5600  * Use token pasting to ensure that we use the proper flag with the
5601  * proper variable.
5602  */
5603 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
5604     do {                                                \
5605         if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
5606             return false;                               \
5607         }                                               \
5608     } while (0)
5609 
5610 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
5611     do {                                                \
5612         if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
5613             return false;                               \
5614         }                                               \
5615     } while (0)
5616 
5617 /* Then special-case the check for 64-bit so that we elide code for ppc32. */
5618 #if TARGET_LONG_BITS == 32
5619 # define REQUIRE_64BIT(CTX)  return false
5620 #else
5621 # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
5622 #endif
5623 
5624 #define REQUIRE_VECTOR(CTX)                             \
5625     do {                                                \
5626         if (unlikely(!(CTX)->altivec_enabled)) {        \
5627             gen_exception((CTX), POWERPC_EXCP_VPU);     \
5628             return true;                                \
5629         }                                               \
5630     } while (0)
5631 
5632 #define REQUIRE_VSX(CTX)                                \
5633     do {                                                \
5634         if (unlikely(!(CTX)->vsx_enabled)) {            \
5635             gen_exception((CTX), POWERPC_EXCP_VSXU);    \
5636             return true;                                \
5637         }                                               \
5638     } while (0)
5639 
5640 #define REQUIRE_FPU(ctx)                                \
5641     do {                                                \
5642         if (unlikely(!(ctx)->fpu_enabled)) {            \
5643             gen_exception((ctx), POWERPC_EXCP_FPU);     \
5644             return true;                                \
5645         }                                               \
5646     } while (0)
5647 
5648 #if !defined(CONFIG_USER_ONLY)
5649 #define REQUIRE_SV(CTX)             \
5650     do {                            \
5651         if (unlikely((CTX)->pr)) {  \
5652             gen_priv_opc(CTX);      \
5653             return true;            \
5654         }                           \
5655     } while (0)
5656 
5657 #define REQUIRE_HV(CTX)                             \
5658     do {                                            \
5659         if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
5660             gen_priv_opc(CTX);                      \
5661             return true;                            \
5662         }                                           \
5663     } while (0)
5664 #else
5665 #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5666 #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5667 #endif
5668 
5669 /*
5670  * Helpers for implementing sets of trans_* functions.
5671  * Defer the implementation of NAME to FUNC, with optional extra arguments.
5672  */
5673 #define TRANS(NAME, FUNC, ...) \
5674     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5675     { return FUNC(ctx, a, __VA_ARGS__); }
5676 #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
5677     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5678     {                                                          \
5679         REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
5680         return FUNC(ctx, a, __VA_ARGS__);                      \
5681     }
5682 #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5683     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5684     {                                                          \
5685         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5686         return FUNC(ctx, a, __VA_ARGS__);                      \
5687     }
5688 
5689 #define TRANS64(NAME, FUNC, ...) \
5690     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5691     { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
5692 #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5693     static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5694     {                                                          \
5695         REQUIRE_64BIT(ctx);                                    \
5696         REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5697         return FUNC(ctx, a, __VA_ARGS__);                      \
5698     }
5699 
5700 /* TODO: More TRANS* helpers for extra insn_flags checks. */
5701 
5702 
5703 #include "decode-insn32.c.inc"
5704 #include "decode-insn64.c.inc"
5705 #include "power8-pmu-regs.c.inc"
5706 
5707 /*
5708  * Incorporate CIA into the constant when R=1.
5709  * Validate that when R=1, RA=0.
5710  */
5711 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
5712 {
5713     d->rt = a->rt;
5714     d->ra = a->ra;
5715     d->si = a->si;
5716     if (a->r) {
5717         if (unlikely(a->ra != 0)) {
5718             gen_invalid(ctx);
5719             return false;
5720         }
5721         d->si += ctx->cia;
5722     }
5723     return true;
5724 }
5725 
5726 #include "translate/fixedpoint-impl.c.inc"
5727 
5728 #include "translate/fp-impl.c.inc"
5729 
5730 #include "translate/vmx-impl.c.inc"
5731 
5732 #include "translate/vsx-impl.c.inc"
5733 
5734 #include "translate/dfp-impl.c.inc"
5735 
5736 #include "translate/spe-impl.c.inc"
5737 
5738 #include "translate/branch-impl.c.inc"
5739 
5740 #include "translate/processor-ctrl-impl.c.inc"
5741 
5742 #include "translate/storage-ctrl-impl.c.inc"
5743 
5744 #include "translate/misc-impl.c.inc"
5745 
5746 #include "translate/bhrb-impl.c.inc"
5747 
5748 /* Handles lfdp */
5749 static void gen_dform39(DisasContext *ctx)
5750 {
5751     if ((ctx->opcode & 0x3) == 0) {
5752         if (ctx->insns_flags2 & PPC2_ISA205) {
5753             return gen_lfdp(ctx);
5754         }
5755     }
5756     return gen_invalid(ctx);
5757 }
5758 
5759 /* Handles stfdp */
5760 static void gen_dform3D(DisasContext *ctx)
5761 {
5762     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
5763         /* stfdp */
5764         if (ctx->insns_flags2 & PPC2_ISA205) {
5765             return gen_stfdp(ctx);
5766         }
5767     }
5768     return gen_invalid(ctx);
5769 }
5770 
5771 #if defined(TARGET_PPC64)
5772 /* brd */
5773 static void gen_brd(DisasContext *ctx)
5774 {
5775     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5776 }
5777 
5778 /* brw */
5779 static void gen_brw(DisasContext *ctx)
5780 {
5781     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5782     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
5784 }
5785 
5786 /* brh */
5787 static void gen_brh(DisasContext *ctx)
5788 {
5789     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
5790     TCGv_i64 t1 = tcg_temp_new_i64();
5791     TCGv_i64 t2 = tcg_temp_new_i64();
5792 
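    /*
     * Byte-reverse each halfword: t2 receives the high byte of every
     * halfword moved down, t1 the low byte moved up, and the two are OR'ed.
     */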
5793     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
5794     tcg_gen_and_i64(t2, t1, mask);
5795     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
5796     tcg_gen_shli_i64(t1, t1, 8);
5797     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
5798 }
5799 #endif
5800 
5801 static opcode_t opcodes[] = {
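/*
 * Each entry records the primary opcode (opc1), the extended opcodes
 * (opc2/opc3), a mask of instruction bits that must be zero (inval), and the
 * insns_flags/insns_flags2 bits a CPU must advertise for the handler to be
 * registered (see create_ppc_opcodes() below).
 */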
5802 #if defined(TARGET_PPC64)
5803 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
5804 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
5805 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
5806 #endif
5807 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
5808 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
5809 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5810 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
5811 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5812 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5813 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5814 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
5815 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
5816 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
5817 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
5818 #if defined(TARGET_PPC64)
5819 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
5820 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
5821 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
5822 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
5823 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
5824 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
5825                PPC_NONE, PPC2_ISA300),
5826 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
5827                PPC_NONE, PPC2_ISA300),
5828 #endif
5829 /* handles lfdp, lxsd, lxssp */
5830 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5831 /* handles stfdp, stxsd, stxssp */
5832 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5833 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5834 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5835 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
5836 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
5837 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
5838 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
5839 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
5840 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5841 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5842 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
5843 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
5844 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
5845 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5846 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5847 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
5848 #if defined(TARGET_PPC64)
5849 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
5850 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
5851 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
5852 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
5853 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
5854 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
5855 #endif
5856 /* ISA v3.0 changed the extended opcode from 62 to 30 */
5857 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
5858 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
5859 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5860 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5861 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
5862 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
5863 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
5864 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
5865 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
5866 #if defined(TARGET_PPC64)
5867 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
5868 #if !defined(CONFIG_USER_ONLY)
5869 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5870 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5871 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5872 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
5873 #endif
5874 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5875 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5876 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5877 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5878 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5879 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
5880 #endif
5881 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5882 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
5883 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
5884 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
5885 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
5886 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
5887 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
5888 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
5889 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
5890 #if defined(TARGET_PPC64)
5891 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
5892 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
5893 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
5894 #endif
5895 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
5896 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
5897 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
5898 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5899 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
5900 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
5901 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5902 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
5903 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5904 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
5905 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5906 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5907 GEN_HANDLER_E(dcblc, 0x1F, 0x06, 0x0c, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5908 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
5909 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5910 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
5911 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
5912 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
5913 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
5914 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5915 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
5916 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
5917 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
5918 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
5919 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
5920 #if defined(TARGET_PPC64)
5921 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
5922 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
5923              PPC_SEGMENT_64B),
5924 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
5925 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
5926              PPC_SEGMENT_64B),
5927 #endif
5928 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
5929 /*
5930  * XXX Those instructions will need to be handled differently for
5931  * different ISA versions
5932  */
5933 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
5934 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
5935 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
5936 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
5937 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
5938 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
5939 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
5940 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
5941 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
5942 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
5943 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
5944 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
5945 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
5946 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
5947 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
5948 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
5949 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
5950 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
5951 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
5952 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
5953 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
5954 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
5955 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
5956 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
5957 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
5958 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
5959 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
5960                PPC_NONE, PPC2_BOOKE206),
5961 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
5962                PPC_NONE, PPC2_BOOKE206),
5963 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
5964                PPC_NONE, PPC2_BOOKE206),
5965 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
5966                PPC_NONE, PPC2_BOOKE206),
5967 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
5968                PPC_NONE, PPC2_BOOKE206),
5969 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
5970 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
5971 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
5972 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
5973                PPC_BOOKE, PPC2_BOOKE206),
5974 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
5975              PPC_440_SPEC),
5976 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
5977 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
5978 
5979 #if defined(TARGET_PPC64)
5980 #undef GEN_PPC64_R2
5981 #undef GEN_PPC64_R4
5982 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
5983 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
5984 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
5985              PPC_64B)
5986 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
5987 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
5988 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
5989              PPC_64B),                                                        \
5990 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
5991              PPC_64B),                                                        \
5992 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
5993              PPC_64B)
5994 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
5995 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
5996 GEN_PPC64_R4(rldic, 0x1E, 0x04),
5997 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
5998 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
5999 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
6000 #endif
6001 
6002 #undef GEN_LDX_E
6003 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
6004 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6005 
6006 #if defined(TARGET_PPC64)
6007 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
6008 
6009 /* HV/P7 and later only */
6010 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
6011 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
6012 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
6013 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
6014 #endif
6015 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
6016 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6017 
6018 /* External PID based load */
6019 #undef GEN_LDEPX
6020 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
6021 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6022               0x00000001, PPC_NONE, PPC2_BOOKE206),
6023 
6024 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
6025 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
6026 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
6027 #if defined(TARGET_PPC64)
6028 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
6029 #endif
6030 
6031 #undef GEN_STX_E
6032 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
6033 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
6034 
6035 #if defined(TARGET_PPC64)
6036 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6037 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6038 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6039 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6040 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6041 #endif
6042 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6043 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6044 
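/* External PID based store */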
6045 #undef GEN_STEPX
6046 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
6047 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6048               0x00000001, PPC_NONE, PPC2_BOOKE206),
6049 
6050 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
6051 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
6052 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
6053 #if defined(TARGET_PPC64)
6054 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
6055 #endif
6056 
6057 #undef GEN_CRLOGIC
6058 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
6059 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6060 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6061 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6062 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6063 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6064 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6065 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6066 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6067 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6068 
6069 #undef GEN_MAC_HANDLER
6070 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
6071 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6072 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6073 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6074 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6075 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6076 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6077 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6078 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6079 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6080 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6081 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6082 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6083 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6084 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6085 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6086 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6087 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6088 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6089 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6090 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6091 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6092 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6093 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6094 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6095 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6096 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6097 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6098 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6099 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6100 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6101 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6102 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6103 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6104 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6105 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6106 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6107 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6108 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6109 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6110 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6111 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6112 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6113 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6114 
6115 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6116                PPC_NONE, PPC2_TM),
6117 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
6118                PPC_NONE, PPC2_TM),
6119 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6120                PPC_NONE, PPC2_TM),
6121 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6122                PPC_NONE, PPC2_TM),
6123 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6124                PPC_NONE, PPC2_TM),
6125 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6126                PPC_NONE, PPC2_TM),
6127 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6128                PPC_NONE, PPC2_TM),
6129 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6130                PPC_NONE, PPC2_TM),
6131 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6132                PPC_NONE, PPC2_TM),
6133 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6134                PPC_NONE, PPC2_TM),
6135 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6136                PPC_NONE, PPC2_TM),
6137 
6138 #include "translate/fp-ops.c.inc"
6139 
6140 #include "translate/vmx-ops.c.inc"
6141 
6142 #include "translate/vsx-ops.c.inc"
6143 
6144 #include "translate/spe-ops.c.inc"
6145 };
6146 
6147 /*****************************************************************************/
6148 /* Opcode types */
6149 enum {
6150     PPC_DIRECT   = 0, /* Opcode routine        */
6151     PPC_INDIRECT = 1, /* Indirect opcode table */
6152 };
6153 
6154 #define PPC_OPCODE_MASK 0x3
6155 
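/*
 * Indirect sub-tables are stored with PPC_INDIRECT or'ed into the low bits
 * of the (aligned) pointer: is_indirect_opcode() tests that tag and
 * ind_table() strips it to recover the real table pointer.
 */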
6156 static inline int is_indirect_opcode(void *handler)
6157 {
6158     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
6159 }
6160 
6161 static inline opc_handler_t **ind_table(void *handler)
6162 {
6163     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
6164 }
6165 
6166 /* Opcode table creation */
6168 static void fill_new_table(opc_handler_t **table, int len)
6169 {
6170     int i;
6171 
6172     for (i = 0; i < len; i++) {
6173         table[i] = &invalid_handler;
6174     }
6175 }
6176 
6177 static int create_new_table(opc_handler_t **table, unsigned char idx)
6178 {
6179     opc_handler_t **tmp;
6180 
6181     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
6182     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
6183     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
6184 
6185     return 0;
6186 }
6187 
6188 static int insert_in_table(opc_handler_t **table, unsigned char idx,
6189                             opc_handler_t *handler)
6190 {
6191     if (table[idx] != &invalid_handler) {
6192         return -1;
6193     }
6194     table[idx] = handler;
6195 
6196     return 0;
6197 }
6198 
6199 static int register_direct_insn(opc_handler_t **ppc_opcodes,
6200                                 unsigned char idx, opc_handler_t *handler)
6201 {
6202     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
6203         printf("*** ERROR: opcode %02x already assigned in main "
6204                "opcode table\n", idx);
6205         return -1;
6206     }
6207 
6208     return 0;
6209 }
6210 
6211 static int register_ind_in_table(opc_handler_t **table,
6212                                  unsigned char idx1, unsigned char idx2,
6213                                  opc_handler_t *handler)
6214 {
6215     if (table[idx1] == &invalid_handler) {
6216         if (create_new_table(table, idx1) < 0) {
6217             printf("*** ERROR: unable to create indirect table "
6218                    "idx=%02x\n", idx1);
6219             return -1;
6220         }
6221     } else {
6222         if (!is_indirect_opcode(table[idx1])) {
6223             printf("*** ERROR: idx %02x already assigned to a direct "
6224                    "opcode\n", idx1);
6225             return -1;
6226         }
6227     }
6228     if (handler != NULL &&
6229         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
6230         printf("*** ERROR: opcode %02x already assigned in "
6231                "opcode table %02x\n", idx2, idx1);
6232         return -1;
6233     }
6234 
6235     return 0;
6236 }
6237 
6238 static int register_ind_insn(opc_handler_t **ppc_opcodes,
6239                              unsigned char idx1, unsigned char idx2,
6240                              opc_handler_t *handler)
6241 {
6242     return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
6243 }
6244 
6245 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
6246                                 unsigned char idx1, unsigned char idx2,
6247                                 unsigned char idx3, opc_handler_t *handler)
6248 {
6249     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6250         printf("*** ERROR: unable to join indirect table idx "
6251                "[%02x-%02x]\n", idx1, idx2);
6252         return -1;
6253     }
6254     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
6255                               handler) < 0) {
6256         printf("*** ERROR: unable to insert opcode "
6257                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6258         return -1;
6259     }
6260 
6261     return 0;
6262 }
6263 
6264 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
6265                                  unsigned char idx1, unsigned char idx2,
6266                                  unsigned char idx3, unsigned char idx4,
6267                                  opc_handler_t *handler)
6268 {
6269     opc_handler_t **table;
6270 
6271     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6272         printf("*** ERROR: unable to join indirect table idx "
6273                "[%02x-%02x]\n", idx1, idx2);
6274         return -1;
6275     }
6276     table = ind_table(ppc_opcodes[idx1]);
6277     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
6278         printf("*** ERROR: unable to join 2nd-level indirect table idx "
6279                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6280         return -1;
6281     }
6282     table = ind_table(table[idx2]);
6283     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
6284         printf("*** ERROR: unable to insert opcode "
6285                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
6286         return -1;
6287     }
6288     return 0;
6289 }
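
/*
 * Dispatch on which opcode fields are present: an opcN value of 0xFF is a
 * sentinel meaning "no sub-opcode at this level", so the handler is
 * registered at the deepest level that carries a real opcode.
 */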
6290 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
6291 {
6292     if (insn->opc2 != 0xFF) {
6293         if (insn->opc3 != 0xFF) {
6294             if (insn->opc4 != 0xFF) {
6295                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6296                                           insn->opc3, insn->opc4,
6297                                           &insn->handler) < 0) {
6298                     return -1;
6299                 }
6300             } else {
6301                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6302                                          insn->opc3, &insn->handler) < 0) {
6303                     return -1;
6304                 }
6305             }
6306         } else {
6307             if (register_ind_insn(ppc_opcodes, insn->opc1,
6308                                   insn->opc2, &insn->handler) < 0) {
6309                 return -1;
6310             }
6311         }
6312     } else {
6313         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
6314             return -1;
6315         }
6316     }
6317 
6318     return 0;
6319 }
6320 
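/*
 * Count the valid handlers in a table; indirect sub-tables that turn out to
 * be empty are freed and replaced with &invalid_handler along the way.
 */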
6321 static int test_opcode_table(opc_handler_t **table, int len)
6322 {
6323     int i, count, tmp;
6324 
6325     for (i = 0, count = 0; i < len; i++) {
6326         /* Consistency fixup */
6327         if (table[i] == NULL) {
6328             table[i] = &invalid_handler;
6329         }
6330         if (table[i] != &invalid_handler) {
6331             if (is_indirect_opcode(table[i])) {
6332                 tmp = test_opcode_table(ind_table(table[i]),
6333                     PPC_CPU_INDIRECT_OPCODES_LEN);
6334                 if (tmp == 0) {
6335                     g_free(table[i]);
6336                     table[i] = &invalid_handler;
6337                 } else {
6338                     count++;
6339                 }
6340             } else {
6341                 count++;
6342             }
6343         }
6344     }
6345 
6346     return count;
6347 }
6348 
6349 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
6350 {
6351     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
6352         printf("*** WARNING: no opcode defined!\n");
6353     }
6354 }
6355 
6356 /*****************************************************************************/
6357 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
6358 {
6359     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
6360     opcode_t *opc;
6361 
6362     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
6363     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
6364         if (((opc->handler.type & pcc->insns_flags) != 0) ||
6365             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
6366             if (register_insn(cpu->opcodes, opc) < 0) {
6367                 error_setg(errp, "ERROR initializing PowerPC instruction "
6368                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
6369                            opc->opc3);
6370                 return;
6371             }
6372         }
6373     }
6374     fix_opcode_tables(cpu->opcodes);
6375     fflush(stdout);
6376     fflush(stderr);
6377 }
6378 
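     /*
      * Free the up to three levels of indirect tables allocated by
      * create_ppc_opcodes; the PPC_INDIRECT tag bit is masked off
      * before each g_free().
      */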
6379 void destroy_ppc_opcodes(PowerPCCPU *cpu)
6380 {
6381     opc_handler_t **table, **table_2;
6382     int i, j, k;
6383 
6384     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
6385         if (cpu->opcodes[i] == &invalid_handler) {
6386             continue;
6387         }
6388         if (is_indirect_opcode(cpu->opcodes[i])) {
6389             table = ind_table(cpu->opcodes[i]);
6390             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
6391                 if (table[j] == &invalid_handler) {
6392                     continue;
6393                 }
6394                 if (is_indirect_opcode(table[j])) {
6395                     table_2 = ind_table(table[j]);
6396                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
6397                         if (table_2[k] != &invalid_handler &&
6398                             is_indirect_opcode(table_2[k])) {
6399                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
6400                                                      ~PPC_INDIRECT));
6401                         }
6402                     }
6403                     g_free((opc_handler_t *)((uintptr_t)table[j] &
6404                                              ~PPC_INDIRECT));
6405                 }
6406             }
6407             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
6408                 ~PPC_INDIRECT));
6409         }
6410     }
6411 }
6412 
6413 int ppc_fixup_cpu(PowerPCCPU *cpu)
6414 {
6415     CPUPPCState *env = &cpu->env;
6416 
6417     /*
6418      * TCG doesn't (yet) emulate some groups of instructions that are
6419      * implemented on some otherwise supported CPUs (e.g. VSX and
6420      * decimal floating point instructions on POWER7).  We remove
6421      * unsupported instruction groups from the cpu state's instruction
6422      * masks and hope the guest can cope.  For at least the pseries
6423      * machine, the unavailability of these instructions can be
6424      * advertised to the guest via the device tree.
6425      */
6426     if ((env->insns_flags & ~PPC_TCG_INSNS)
6427         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
6428         warn_report("Disabling some instructions which are not "
6429                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
6430                     env->insns_flags & ~PPC_TCG_INSNS,
6431                     env->insns_flags2 & ~PPC_TCG_INSNS2);
6432     }
6433     env->insns_flags &= PPC_TCG_INSNS;
6434     env->insns_flags2 &= PPC_TCG_INSNS2;
6435     return 0;
6436 }
6437 
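     /*
      * Legacy table-driven decoder, used for instructions not handled by
      * decodetree: walk the opc1/opc2/opc3/opc4 indirect tables to the
      * handler, check the reserved-bit (inval) mask, then generate code.
      */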
6438 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
6439 {
6440     opc_handler_t **table, *handler;
6441     uint32_t inval;
6442 
6443     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
6444               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6445               ctx->le_mode ? "little" : "big");
6446 
6447     table = cpu->opcodes;
6448     handler = table[opc1(insn)];
6449     if (is_indirect_opcode(handler)) {
6450         table = ind_table(handler);
6451         handler = table[opc2(insn)];
6452         if (is_indirect_opcode(handler)) {
6453             table = ind_table(handler);
6454             handler = table[opc3(insn)];
6455             if (is_indirect_opcode(handler)) {
6456                 table = ind_table(handler);
6457                 handler = table[opc4(insn)];
6458             }
6459         }
6460     }
6461 
6462     /* Is opcode *REALLY* valid? */
6463     if (unlikely(handler->handler == &gen_invalid)) {
6464         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
6465                       "%02x - %02x - %02x - %02x (%08x) "
6466                       TARGET_FMT_lx "\n",
6467                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6468                       insn, ctx->cia);
6469         return false;
6470     }
6471 
6472     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
6473                  && Rc(insn))) {
6474         inval = handler->inval2;
6475     } else {
6476         inval = handler->inval1;
6477     }
6478 
6479     if (unlikely((insn & inval) != 0)) {
6480         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
6481                       "%02x - %02x - %02x - %02x (%08x) "
6482                       TARGET_FMT_lx "\n", insn & inval,
6483                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6484                       insn, ctx->cia);
6485         return false;
6486     }
6487 
6488     handler->handler(ctx);
6489     return true;
6490 }
6491 
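     /* Unpack the TB flags (hflags) into the DisasContext. */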
6492 static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
6493 {
6494     DisasContext *ctx = container_of(dcbase, DisasContext, base);
6495     CPUPPCState *env = cpu_env(cs);
6496     uint32_t hflags = ctx->base.tb->flags;
6497 
6498     ctx->spr_cb = env->spr_cb;
6499     ctx->pr = (hflags >> HFLAGS_PR) & 1;
6500     ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
6501     ctx->dr = (hflags >> HFLAGS_DR) & 1;
6502     ctx->hv = (hflags >> HFLAGS_HV) & 1;
6503     ctx->insns_flags = env->insns_flags;
6504     ctx->insns_flags2 = env->insns_flags2;
6505     ctx->access_type = -1;
6506     ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
6507     ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
6508     ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
6509     ctx->flags = env->flags;
6510 #if defined(TARGET_PPC64)
6511     ctx->excp_model = env->excp_model;
6512     ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
6513     ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
6514     ctx->has_bhrb = !!(env->flags & POWERPC_FLAG_BHRB);
6515 #endif
6516     ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
6517         || (env->mmu_model & POWERPC_MMU_64);
6518 
6519     ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
6520     ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
6521     ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
6522     ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
6523     ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
6524     ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
6525     ctx->hr = (hflags >> HFLAGS_HR) & 1;
6526     ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
6527     ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
6528     ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
6529     ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
6530     ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
6531     ctx->bhrb_enable = (hflags >> HFLAGS_BHRB_ENABLE) & 1;
6532 
6533     ctx->singlestep_enabled = 0;
6534     if ((hflags >> HFLAGS_SE) & 1) {
6535         ctx->singlestep_enabled |= CPU_SINGLE_STEP;
6536         ctx->base.max_insns = 1;
6537     }
6538     if ((hflags >> HFLAGS_BE) & 1) {
6539         ctx->singlestep_enabled |= CPU_BRANCH_STEP;
6540     }
6541 }
6542 
6543 static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
6544 {
6545 }
6546 
6547 static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
6548 {
6549     tcg_gen_insn_start(dcbase->pc_next);
6550 }
6551 
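     /*
      * Prefixed instructions use primary opcode 1 and only exist on ISA
      * v3.1; REQUIRE_INSNS_FLAGS2 makes this return false on older CPUs,
      * so opcode 1 then falls through to the normal decoders.
      */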
6552 static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
6553 {
6554     REQUIRE_INSNS_FLAGS2(ctx, ISA310);
6555     return opc1(insn) == 1;
6556 }
6557 
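     /*
      * Translate a single instruction: fetch one word; a prefixed
      * instruction (ISA v3.1) gets its suffix word fetched too and goes
      * through the 64-bit decoder, everything else through decode_insn32
      * with decode_legacy as fallback.
      */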
6558 static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
6559 {
6560     DisasContext *ctx = container_of(dcbase, DisasContext, base);
6561     PowerPCCPU *cpu = POWERPC_CPU(cs);
6562     CPUPPCState *env = cpu_env(cs);
6563     target_ulong pc;
6564     uint32_t insn;
6565     bool ok;
6566 
6567     LOG_DISAS("----------------\n");
6568     LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
6569               ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
6570 
6571     ctx->cia = pc = ctx->base.pc_next;
6572     insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
6573     ctx->base.pc_next = pc += 4;
6574 
6575     if (!is_prefix_insn(ctx, insn)) {
6576         ctx->opcode = insn;
6577         ok = (decode_insn32(ctx, insn) ||
6578               decode_legacy(cpu, ctx, insn));
6579     } else if ((pc & 63) == 0) {
6580         /*
6581          * Power v3.1, section 1.9 Exceptions:
6582          * attempt to execute a prefixed instruction that crosses a
6583          * 64-byte address boundary (system alignment error).
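              * pc has already been advanced past the 4-byte prefix word,
              * so (pc & 63) == 0 means the prefix sat in the last word of
              * a 64-byte block and the suffix would cross the boundary.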
6584          */
6585         gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
6586         ok = true;
6587     } else {
6588         uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
6589                                              need_byteswap(ctx));
6590         ctx->base.pc_next = pc += 4;
6591         ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
6592     }
6593     if (!ok) {
6594         gen_invalid(ctx);
6595     }
6596 
6597     /* End the TB when crossing a page boundary. */
6598     if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
6599         ctx->base.is_jmp = DISAS_TOO_MANY;
6600     }
6601 }
6602 
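     /*
      * End of TB: honour single stepping first, then emit the exit
      * sequence matching how translation stopped (goto_tb, chained
      * lookup, or plain exit), counting PMU instructions where needed.
      */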
6603 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
6604 {
6605     DisasContext *ctx = container_of(dcbase, DisasContext, base);
6606     DisasJumpType is_jmp = ctx->base.is_jmp;
6607     target_ulong nip = ctx->base.pc_next;
6608 
6609     if (is_jmp == DISAS_NORETURN) {
6610         /* We have already exited the TB. */
6611         return;
6612     }
6613 
6614     /* Honor single stepping. */
6615     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)) {
6616         bool rfi_type = false;
6617 
6618         switch (is_jmp) {
6619         case DISAS_TOO_MANY:
6620         case DISAS_EXIT_UPDATE:
6621         case DISAS_CHAIN_UPDATE:
6622             gen_update_nip(ctx, nip);
6623             break;
6624         case DISAS_EXIT:
6625         case DISAS_CHAIN:
6626             /*
6627              * This is a heuristic, to put it kindly. The rfi class of
6628              * instructions is among the few, other than branches, that
6629              * change NIP without taking an interrupt. Single-step trace
6630              * interrupts do not fire on completion of these instructions.
6631              */
6632             rfi_type = true;
6633             break;
6634         default:
6635             g_assert_not_reached();
6636         }
6637 
6638         gen_debug_exception(ctx, rfi_type);
6639         return;
6640     }
6641 
6642     switch (is_jmp) {
6643     case DISAS_TOO_MANY:
6644         if (use_goto_tb(ctx, nip)) {
6645             pmu_count_insns(ctx);
6646             tcg_gen_goto_tb(0);
6647             gen_update_nip(ctx, nip);
6648             tcg_gen_exit_tb(ctx->base.tb, 0);
6649             break;
6650         }
6651         /* fall through */
6652     case DISAS_CHAIN_UPDATE:
6653         gen_update_nip(ctx, nip);
6654         /* fall through */
6655     case DISAS_CHAIN:
6656         /*
6657          * tcg_gen_lookup_and_goto_ptr will exit the TB if the
6658          * CF_NO_GOTO_PTR cflag is set. Count insns now.
6659          */
6660         if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
6661             pmu_count_insns(ctx);
6662         }
6663 
6664         tcg_gen_lookup_and_goto_ptr();
6665         break;
6666 
6667     case DISAS_EXIT_UPDATE:
6668         gen_update_nip(ctx, nip);
6669         /* fall through */
6670     case DISAS_EXIT:
6671         pmu_count_insns(ctx);
6672         tcg_gen_exit_tb(NULL, 0);
6673         break;
6674 
6675     default:
6676         g_assert_not_reached();
6677     }
6678 }
6679 
6680 static const TranslatorOps ppc_tr_ops = {
6681     .init_disas_context = ppc_tr_init_disas_context,
6682     .tb_start           = ppc_tr_tb_start,
6683     .insn_start         = ppc_tr_insn_start,
6684     .translate_insn     = ppc_tr_translate_insn,
6685     .tb_stop            = ppc_tr_tb_stop,
6686 };
6687 
6688 void ppc_translate_code(CPUState *cs, TranslationBlock *tb,
6689                         int *max_insns, vaddr pc, void *host_pc)
6690 {
6691     DisasContext ctx;
6692 
6693     translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
6694 }
6695