xref: /openbmc/qemu/target/ppc/translate.c (revision 080741abc293e79b6e860e2c8d66bfe519090c86)
1 /*
2  *  PowerPC emulation for qemu: main translation routines.
3  *
4  *  Copyright (c) 2003-2007 Jocelyn Mayer
5  *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6  *
7  * This library is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * This library is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19  */
20 
21 #include "qemu/osdep.h"
22 #include "cpu.h"
23 #include "internal.h"
24 #include "disas/disas.h"
25 #include "exec/exec-all.h"
26 #include "tcg/tcg-op.h"
27 #include "tcg/tcg-op-gvec.h"
28 #include "qemu/host-utils.h"
29 #include "qemu/main-loop.h"
30 #include "exec/cpu_ldst.h"
31 
32 #include "exec/helper-proto.h"
33 #include "exec/helper-gen.h"
34 
35 #include "exec/translator.h"
36 #include "exec/log.h"
37 #include "qemu/atomic128.h"
38 #include "spr_common.h"
39 
40 #include "qemu/qemu-print.h"
41 #include "qapi/error.h"
42 
/* Single-stepping modes recorded in DisasContext::singlestep_enabled. */
#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2

/* Include definitions for instructions classes and implementations flags */
/* #define PPC_DEBUG_DISAS */

/* Disassembly tracing; compiled out unless PPC_DEBUG_DISAS is defined. */
#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
54 /*****************************************************************************/
55 /* Code translation helpers                                                  */
56 
/* global register indexes */
/* Backing storage for the register names passed to tcg_global_mem_new*();
 * sized exactly for the strings written by ppc_translate_init(). */
static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5           /* CRF */];
static TCGv cpu_gpr[32];        /* general purpose registers */
static TCGv cpu_gprh[32];       /* SPE upper halves of the GPRs */
static TCGv_i32 cpu_crf[8];     /* condition register fields */
static TCGv cpu_nip;            /* next instruction pointer */
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
/* XER is kept split: residual bits plus individual SO/OV/CA(32) flags. */
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;        /* load-reservation address */
static TCGv cpu_reserve_val;    /* load-reservation value */
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;
76 
77 #include "exec/gen-icount.h"
78 
/*
 * Create the TCG globals that mirror the CPUPPCState fields used by the
 * translator.  Register names are written into cpu_reg_names, whose layout
 * (and the hard-coded increments below) must stay in sync with the
 * snprintf formats used here.
 */
void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    /* "crf0".."crf7": 5 bytes each including the NUL terminator. */
    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    /* "rN"/"rNH": length depends on whether N has one or two digits. */
    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    /* XER is split across several globals; see spr_read/write_xer(). */
    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_val),
                                     "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}
153 
/* internal defines */
/* Per-translation-block state for the PowerPC front end. */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;  /* current instruction address */
    uint32_t opcode;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;   /* le_mode drives need_byteswap() */
    bool lazy_tlb_flush;
    bool need_access_type;      /* gates updates in gen_set_access_type() */
    int mem_idx;
    int access_type;            /* last value written to cpu_access_type */
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;               /* 64-bit mode; see NARROW_MODE() */
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    bool hr;
    bool mmcr0_pmcc0;
    bool mmcr0_pmcc1;
    bool pmu_insn_cnt;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;     /* CPU_SINGLE_STEP / CPU_BRANCH_STEP bits */
    uint32_t flags;             /* POWERPC_FLAG_* (e.g. POWERPC_FLAG_DE) */
    uint64_t insns_flags;
    uint64_t insns_flags2;
};
187 
/* Front-end-specific is_jmp values (extend the generic DISAS_* set). */
#define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
#define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
#define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
#define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
    /* Swap whenever the guest's current endianness differs from the
     * build-time default endianness of the target. */
#if TARGET_BIG_ENDIAN
     return ctx->le_mode;
#else
     return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long.  */
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif
209 
/* Opcode table entry: validity masks, classification flags and emitter. */
struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};
222 
/* SPR load/store helpers */

/* Copy architected SPR 'reg' from env into TCG value 't'. */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

/* Store TCG value 't' into architected SPR 'reg' in env. */
static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}
233 
234 static inline void gen_set_access_type(DisasContext *ctx, int access_type)
235 {
236     if (ctx->need_access_type && ctx->access_type != access_type) {
237         tcg_gen_movi_i32(cpu_access_type, access_type);
238         ctx->access_type = access_type;
239     }
240 }
241 
242 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
243 {
244     if (NARROW_MODE(ctx)) {
245         nip = (uint32_t)nip;
246     }
247     tcg_gen_movi_tl(cpu_nip, nip);
248 }
249 
250 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
251 {
252     TCGv_i32 t0, t1;
253 
254     /*
255      * These are all synchronous exceptions, we set the PC back to the
256      * faulting instruction
257      */
258     gen_update_nip(ctx, ctx->cia);
259     t0 = tcg_const_i32(excp);
260     t1 = tcg_const_i32(error);
261     gen_helper_raise_exception_err(cpu_env, t0, t1);
262     tcg_temp_free_i32(t0);
263     tcg_temp_free_i32(t1);
264     ctx->base.is_jmp = DISAS_NORETURN;
265 }
266 
267 static void gen_exception(DisasContext *ctx, uint32_t excp)
268 {
269     TCGv_i32 t0;
270 
271     /*
272      * These are all synchronous exceptions, we set the PC back to the
273      * faulting instruction
274      */
275     gen_update_nip(ctx, ctx->cia);
276     t0 = tcg_const_i32(excp);
277     gen_helper_raise_exception(cpu_env, t0);
278     tcg_temp_free_i32(t0);
279     ctx->base.is_jmp = DISAS_NORETURN;
280 }
281 
282 static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
283                               target_ulong nip)
284 {
285     TCGv_i32 t0;
286 
287     gen_update_nip(ctx, nip);
288     t0 = tcg_const_i32(excp);
289     gen_helper_raise_exception(cpu_env, t0);
290     tcg_temp_free_i32(t0);
291     ctx->base.is_jmp = DISAS_NORETURN;
292 }
293 
294 static void gen_icount_io_start(DisasContext *ctx)
295 {
296     if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
297         gen_io_start();
298         /*
299          * An I/O instruction must be last in the TB.
300          * Chain to the next TB, and let the code from gen_tb_start
301          * decide if we need to return to the main loop.
302          * Doing this first also allows this value to be overridden.
303          */
304         ctx->base.is_jmp = DISAS_TOO_MANY;
305     }
306 }
307 
#if !defined(CONFIG_USER_ONLY)
/* Ask the CPU to re-evaluate pending interrupts (icount-aware). */
static void gen_ppc_maybe_interrupt(DisasContext *ctx)
{
    gen_icount_io_start(ctx);
    gen_helper_ppc_maybe_interrupt(cpu_env);
}
#endif
315 
316 /*
317  * Tells the caller what is the appropriate exception to generate and prepares
318  * SPR registers for this exception.
319  *
320  * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
321  * POWERPC_EXCP_DEBUG (on BookE).
322  */
323 static uint32_t gen_prep_dbgex(DisasContext *ctx)
324 {
325     if (ctx->flags & POWERPC_FLAG_DE) {
326         target_ulong dbsr = 0;
327         if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
328             dbsr = DBCR0_ICMP;
329         } else {
330             /* Must have been branch */
331             dbsr = DBCR0_BRT;
332         }
333         TCGv t0 = tcg_temp_new();
334         gen_load_spr(t0, SPR_BOOKE_DBSR);
335         tcg_gen_ori_tl(t0, t0, dbsr);
336         gen_store_spr(SPR_BOOKE_DBSR, t0);
337         tcg_temp_free(t0);
338         return POWERPC_EXCP_DEBUG;
339     } else {
340         return POWERPC_EXCP_TRACE;
341     }
342 }
343 
/* Raise the debug exception chosen by gen_prep_dbgex() and end the TB. */
static void gen_debug_exception(DisasContext *ctx)
{
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
    ctx->base.is_jmp = DISAS_NORETURN;
}
349 
/* Invalid-instruction exception with sub-code 'error'. */
static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

/* Privilege violation: always a program check. */
static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

/* Hypervisor privilege violation. */
static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}
366 
/*****************************************************************************/
/* SPR READ/WRITE CALLBACKS */

/* Callback for inaccessible SPRs: does nothing (debug printf disabled). */
void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}
377 
/* #define PPC_DUMP_SPR_ACCESSES */

/*
 * Generic callbacks:
 * do nothing but store/retrieve spr value
 */

/* Optional tracing hook for SPR reads; a no-op unless PPC_DUMP_SPR_ACCESSES. */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_load_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

/* mfspr: GPR <- env->spr[sprn], no side effects. */
void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

/* Optional tracing hook for SPR writes; a no-op unless PPC_DUMP_SPR_ACCESSES. */
static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_store_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

/* mtspr: env->spr[sprn] <- GPR, no side effects. */
void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}
413 
/* CTRL: generic store plus a forced end of the translation block. */
void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
{
    spr_write_generic(ctx, sprn, gprn);

    /*
     * SPR_CTRL writes must force a new translation block,
     * allowing the PMU to calculate the run latch events with
     * more accuracy.
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
425 
#if !defined(CONFIG_USER_ONLY)
/* Store only the low 32 bits of the GPR into the SPR (64-bit targets). */
void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

/*
 * Clearing-style store: spr &= -gpr.
 * NOTE(review): tcg_gen_neg_tl emits arithmetic negation (two's
 * complement), so this computes old & -new rather than the
 * write-one-to-clear form old & ~new (which would use a NOT/ANDC).
 * Confirm against the callers/ISA whether this is intended.
 */
void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

/* Deliberate no-op for SPRs whose accesses are silently ignored. */
void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}

#endif
457 
/* SPR common to all PowerPC */
/* XER */
/* Reassemble the architected XER image from the split-out flag globals. */
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    /* Start from the residual XER bits, then OR in SO/OV/CA at their
     * architected positions. */
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    /* OV32/CA32 only exist from ISA v3.00 on. */
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
483 
/* Scatter a full XER image into cpu_xer plus the individual flag globals. */
void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}
498 
/* LR */
/* LR, CFAR and CTR are plain TCG globals; accesses are simple moves. */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

/* CFAR */
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}
533 
/* User read access to SPR */
/* USPRx */
/* UMMCRx */
/* UPMCx */
/* USIA */
/* UDECR */
/* User-mode aliases: SPR sprn maps onto SPR sprn + 0x10 (its privileged
 * counterpart). */
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
#endif
551 
/* SPR common to all non-embedded PowerPC */
/* DECR */
/* Decrementer accesses go through helpers and count as icount I/O. */
#if !defined(CONFIG_USER_ONLY)
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
}

void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
}
#endif
567 
/* SPR common to all non-embedded PowerPC, except 601 */
/* Time base */
/* TB reads are icount I/O; alternate TB (ATB) reads are not. */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
}
591 
#if !defined(CONFIG_USER_ONLY)
/* Privileged time base writes; TB writes are icount I/O, ATB writes not. */
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* PURR */
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

/* VTB */
void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}

#endif
#endif
661 
#if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT0U */
/* IBAT0L...IBAT7L */
/* The SPR number's parity selects the U/L word; its offset from the
 * first IBAT SPR selects the BAT pair. */
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

/* Same as spr_read_ibat for the high BAT pairs (IBAT4..7). */
void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

/* Writes go through helpers so MMU state can be kept consistent. */
void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
706 
/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
/* Data BATs: same layout and indexing scheme as the IBAT callbacks above. */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
750 
/* SDR1 */
/* Page-table base; store via helper so dependent MMU state is updated. */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* 64 bits PowerPC specific SPRs */
/* PIDR */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
}

/* HIOR is backed by env->excp_prefix rather than the spr[] array. */
void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    /* Mask the value to HIOR's valid bits before storing. */
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    tcg_temp_free(t0);
}
void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
}

/* DPDES */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
}

void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
}
#endif
#endif
804 
/* PowerPC 40x specific registers */
#if !defined(CONFIG_USER_ONLY)
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
}

void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    /* The raw value is kept in spr[] in addition to the helper's effects. */
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
}

/* Only the low 8 bits of the PID are architected on 40x. */
void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
    gen_helper_store_40x_pid(cpu_env, t0);
    tcg_temp_free(t0);
}

void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
}
#endif
866 
/* PIR */
#if !defined(CONFIG_USER_ONLY)
/* Only the low 4 bits of PIR are writable. */
void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
    tcg_temp_free(t0);
}
#endif
877 
/* SPE specific registers */
/* SPEFSCR lives in env->spe_fscr as a 32-bit field; widen/narrow on access. */
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
    tcg_temp_free_i32(t0);
}

void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_temp_free_i32(t0);
}
894 
#if !defined(CONFIG_USER_ONLY)
/* Callback used to write the exception vector base */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    /* Keep only the bits allowed by ivpr_mask, then mirror the value into
     * both env->excp_prefix and the SPR itself. */
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

/* Map a BookE IVORn SPR write onto env->excp_vectors[]. */
void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
{
    int sprn_offs;

    /* The IVOR SPR numbers come in three non-contiguous ranges. */
    if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
        sprn_offs = sprn - SPR_BOOKE_IVOR0;
    } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
        sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
    } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
        sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
    } else {
        qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
                      " vector 0x%03x\n", sprn);
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
#endif
932 
#ifdef TARGET_PPC64
#ifndef CONFIG_USER_ONLY
/* Masked AMR update: only bits permitted by (U)AMOR may change. */
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Masked UAMOR update: insertion mask comes from AMOR. */
void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

/* Masked IAMR update: insertion mask comes from AMOR. */
void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* Or'in new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
#endif
#endif
1033 
1034 #ifndef CONFIG_USER_ONLY
/*
 * mfspr THRMx: let the helper fix up the register contents first
 * (presumably refreshing thermal-sensor state — see helper_fixup_thrm),
 * then read the stored value.
 */
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
1041 #endif /* !CONFIG_USER_ONLY */
1042 
1043 #if !defined(CONFIG_USER_ONLY)
1044 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1045 {
1046     TCGv t0 = tcg_temp_new();
1047 
1048     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1049     gen_store_spr(sprn, t0);
1050     tcg_temp_free(t0);
1051 }
1052 
1053 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1054 {
1055     TCGv t0 = tcg_temp_new();
1056 
1057     tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1058     gen_store_spr(sprn, t0);
1059     tcg_temp_free(t0);
1060 }
1061 
1062 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1063 {
1064     TCGv t0 = tcg_temp_new();
1065 
1066     tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1067                     ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1068     gen_store_spr(sprn, t0);
1069     tcg_temp_free(t0);
1070 }
1071 
/*
 * mtspr MMUCSR0 (BookE 2.06): the helper performs the TLB flush
 * requested by the written value.
 */
void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}
1076 
1077 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1078 {
1079     TCGv_i32 t0 = tcg_const_i32(sprn);
1080     gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
1081     tcg_temp_free_i32(t0);
1082 }
/* mtspr EPLC (BookE): delegated to a helper. */
void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}
/* mtspr EPSC (BookE): delegated to a helper. */
void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}
1091 
1092 #endif
1093 
1094 #if !defined(CONFIG_USER_ONLY)
/*
 * mtspr MAS7_MAS3: split one 64-bit source into MAS3 (low 32 bits)
 * and MAS7 (high 32 bits).  Note "val" is reused for both halves,
 * so the MAS3 store must come first.
 */
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}
1104 
1105 void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1106 {
1107     TCGv mas7 = tcg_temp_new();
1108     TCGv mas3 = tcg_temp_new();
1109     gen_load_spr(mas7, SPR_BOOKE_MAS7);
1110     tcg_gen_shli_tl(mas7, mas7, 32);
1111     gen_load_spr(mas3, SPR_BOOKE_MAS3);
1112     tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1113     tcg_temp_free(mas3);
1114     tcg_temp_free(mas7);
1115 }
1116 
1117 #endif
1118 
1119 #ifdef TARGET_PPC64
1120 static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1121                                     int bit, int sprn, int cause)
1122 {
1123     TCGv_i32 t1 = tcg_const_i32(bit);
1124     TCGv_i32 t2 = tcg_const_i32(sprn);
1125     TCGv_i32 t3 = tcg_const_i32(cause);
1126 
1127     gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);
1128 
1129     tcg_temp_free_i32(t3);
1130     tcg_temp_free_i32(t2);
1131     tcg_temp_free_i32(t1);
1132 }
1133 
1134 static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1135                                    int bit, int sprn, int cause)
1136 {
1137     TCGv_i32 t1 = tcg_const_i32(bit);
1138     TCGv_i32 t2 = tcg_const_i32(sprn);
1139     TCGv_i32 t3 = tcg_const_i32(cause);
1140 
1141     gen_helper_msr_facility_check(cpu_env, t1, t2, t3);
1142 
1143     tcg_temp_free_i32(t3);
1144     tcg_temp_free_i32(t2);
1145     tcg_temp_free_i32(t1);
1146 }
1147 
1148 void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1149 {
1150     TCGv spr_up = tcg_temp_new();
1151     TCGv spr = tcg_temp_new();
1152 
1153     gen_load_spr(spr, sprn - 1);
1154     tcg_gen_shri_tl(spr_up, spr, 32);
1155     tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1156 
1157     tcg_temp_free(spr);
1158     tcg_temp_free(spr_up);
1159 }
1160 
/*
 * Write the upper 32 bits of the preceding (sprn - 1) 64-bit SPR,
 * preserving its lower half (read-modify-write via deposit).
 */
void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
    gen_store_spr(sprn - 1, spr);

    tcg_temp_free(spr);
}
1171 
1172 #if !defined(CONFIG_USER_ONLY)
/*
 * mtspr HMER: software can only clear bits — the stored value is the
 * AND of the previous contents and the source register.
 */
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
    tcg_temp_free(hmer);
}
1183 
/*
 * mtspr LPCR: delegated to a helper (presumably so write masking and
 * side effects can be applied in C — see helper_store_lpcr).
 */
void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
1188 #endif /* !defined(CONFIG_USER_ONLY) */
1189 
/* mfspr TAR: check FSCR[TAR] facility availability, then read normally. */
void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}
1195 
/* mtspr TAR: check FSCR[TAR] facility availability, then write normally. */
void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}
1201 
/* mfspr of a TM register: check MSR[TM] availability, then read normally. */
void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}
1207 
/* mtspr of a TM register: check MSR[TM] availability, then write normally. */
void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}
1213 
/* mfspr of a TM *U register: read the upper half of the previous SPR. */
void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}
1219 
/* mtspr of a TM *U register: write the upper half of the previous SPR. */
void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
1225 
/* mfspr of an EBB register: check FSCR[EBB] availability, then read. */
void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}
1231 
/* mtspr of an EBB register: check FSCR[EBB] availability, then write. */
void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}
1237 
/* mfspr of an EBB *U register: read the upper half of the previous SPR. */
void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}
1243 
/* mtspr of an EBB *U register: write the upper half of the previous SPR. */
void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
1249 #endif
1250 
/*
 * GEN_HANDLER*: declare an opcode table entry.  The "_E" variants carry
 * a second type flag (type2), the "2" variants give the table entry a
 * name distinct from the generator function, and the "_2" variants add
 * a fourth opcode level.
 */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1268 
/* One entry of the opcode decode tables. */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4; /* primary..fourth-level opcodes */
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler; /* decode masks, type flags and generator callback */
    const char *oname;     /* opcode name, for diagnostics */
} opcode_t;
1277 
/* Raise a privileged-instruction program interrupt. */
static void gen_priv_opc(DisasContext *ctx)
{
    gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
}
1282 
/* Helpers for priv. check */
#define GEN_PRIV(CTX)              \
    do {                           \
        gen_priv_opc(CTX); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
/* In user-only mode any privileged instruction traps immediately */
#define CHK_HV(CTX) GEN_PRIV(CTX)
#define CHK_SV(CTX) GEN_PRIV(CTX)
#define CHK_HVRM(CTX) GEN_PRIV(CTX)
#else
/*
 * Trap unless hypervisor-privileged (HV=1, PR=0).  Use the macro
 * argument rather than relying on a variable literally named "ctx"
 * being in scope at the expansion site (macro hygiene fix).
 */
#define CHK_HV(CTX)                                  \
    do {                                             \
        if (unlikely((CTX)->pr || !(CTX)->hv)) {     \
            GEN_PRIV(CTX);                           \
        }                                            \
    } while (0)
/* Trap in problem state (PR=1) */
#define CHK_SV(CTX)                   \
    do {                              \
        if (unlikely((CTX)->pr)) {    \
            GEN_PRIV(CTX);            \
        }                             \
    } while (0)
/* Trap unless hypervisor real mode (HV=1, PR=0, DR=0) */
#define CHK_HVRM(CTX)                                             \
    do {                                                          \
        if (unlikely((CTX)->pr || !(CTX)->hv || (CTX)->dr)) {     \
            GEN_PRIV(CTX);                                        \
        }                                                         \
    } while (0)
#endif

/* No privilege check at all */
#define CHK_NONE(CTX)
1315 
1316 /*****************************************************************************/
1317 /* PowerPC instructions table                                                */
1318 
/*
 * GEN_OPCODE* build opcode_t initializers: GEN_OPCODE uses
 * stringify(name) for oname and marks opc4 unused (0xff);
 * GEN_OPCODE_DUAL fills both invalid-bit masks; GEN_OPCODE2/4 take an
 * explicit oname; GEN_OPCODE3/4 take a real fourth-level opcode.
 */
#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
1390 
/* Invalid instruction: raise an invalid-opcode program interrupt. */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
1396 
/* Catch-all table entry used for unassigned opcodes. */
static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};
1404 
1405 /***                           Integer comparison                          ***/
1406 
/*
 * Compute CR field crf for the comparison of arg0 with arg1 (signed if
 * s != 0): exactly one of LT/GT/EQ, OR'ed with a copy of XER[SO].
 */
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    /* Start from EQ, then overwrite with LT or GT if that compare holds */
    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    /* Merge in the SO bit */
    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}
1429 
1430 static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1431 {
1432     TCGv t0 = tcg_const_tl(arg1);
1433     gen_op_cmp(arg0, t0, s, crf);
1434     tcg_temp_free(t0);
1435 }
1436 
1437 static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1438 {
1439     TCGv t0, t1;
1440     t0 = tcg_temp_new();
1441     t1 = tcg_temp_new();
1442     if (s) {
1443         tcg_gen_ext32s_tl(t0, arg0);
1444         tcg_gen_ext32s_tl(t1, arg1);
1445     } else {
1446         tcg_gen_ext32u_tl(t0, arg0);
1447         tcg_gen_ext32u_tl(t1, arg1);
1448     }
1449     gen_op_cmp(t0, t1, s, crf);
1450     tcg_temp_free(t1);
1451     tcg_temp_free(t0);
1452 }
1453 
1454 static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1455 {
1456     TCGv t0 = tcg_const_tl(arg1);
1457     gen_op_cmp32(arg0, t0, s, crf);
1458     tcg_temp_free(t0);
1459 }
1460 
/* Set CR0 from a signed compare of reg against zero (Rc=1 semantics). */
static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}
1469 
/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    /* src1 = byte under test; src2lo/src2hi = low/high bound of range 0 */
    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    /* crf = (src2lo <= src1) && (src1 <= src2hi) */
    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    /* L=1: also accept membership in the second byte-pair range */
    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    /* The result lands in the GT bit of the CR field */
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}
1507 
1508 #if defined(TARGET_PPC64)
/* cmpeqb: byte-equality comparison, implemented entirely in the helper. */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
1515 #endif
1516 
/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    /* BI selects a CR bit; "mask" picks it out of its 4-bit CR field */
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    /* rD = CR-bit-set ? rA (or 0 when rA == r0) : rB */
    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}
1535 
/* cmpb: PowerPC 2.05 specification — byte-wise compare done in the helper. */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}
1542 
1543 /***                           Integer arithmetic                          ***/
1544 
/*
 * Set XER[OV] and SO (and OV32 on ISA 3.00) for result arg0 of an
 * addition (sub = 0) or subtraction (sub = 1) of arg1 and arg2:
 *   add: ov = sign_bit((arg0 ^ arg2) & ~(arg1 ^ arg2))
 *   sub: ov = sign_bit((arg0 ^ arg2) &  (arg1 ^ arg2))
 * The sign bit is bit 31 in narrow (32-bit) mode, else the top bit.
 */
static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}
1571 
/*
 * Compute XER[CA32] (ISA 3.00 only — no-op otherwise) for res, the
 * result of arg0 +/- arg1: the carry out of bit 31 is recovered as
 * bit 32 of (arg0 ^ arg1 ^ res), using eqv instead of xor for
 * subtraction.
 */
static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}
1592 
/*
 * Common add function: ret = arg1 + arg2 (+ ca when add_ca).
 * Optionally computes the carry into ca/ca32 (compute_ca), overflow
 * (compute_ov) and CR0 (compute_rc0).  "ca" is the carry register to
 * use — CA normally, OV for addex (see the table below).
 */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    /* Work in a temporary when flags are needed, so inputs may alias ret */
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/*
 * Add functions with two operands.  "ca" names the carry TCGv; its
 * 32-bit counterpart is derived by token-pasting "32" (glue).
 */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}
1675 
/* add  add.  addo  addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc  addc.  addco  addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde  adde.  addeo  addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme  addme.  addmeo  addmeo.  */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex — note: uses OV (cpu_ov) rather than CA as its carry bit */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze  addze.  addzeo  addzeo.*/
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addic  addic. — add immediate carrying; CR0 update selected by caller */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}
1701 
/* addic: no CR0 update */
static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}
1706 
/* addic.: record form, updates CR0 */
static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}
1711 
/*
 * 32-bit divide: ret = low32(arg1) / low32(arg2), signed if "sign".
 * Invalid cases (divide by zero; INT_MIN / -1 when signed) are flagged
 * in t2; the divisor is then replaced with the flag value (1) so the
 * host division never traps, and OV is set from t2 when compute_ov.
 */
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        /* t2 = (t0 == INT_MIN && t1 == -1) || (t1 == 0) */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* Invalid division: force divisor to 1 (= t2) to avoid a trap */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        /* t2 = (t1 == 0) */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        /* OV (and OV32 on ISA 3.00) reflects the invalid-division flag */
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
/* Div functions: expand to gen_<name> handlers around gen_op_arith_divw */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     sign, compute_ov);                                       \
}
/* divwu  divwu.  divwuo  divwuo.   */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw  divw.  divwo  divwo.   */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);
1769 
/*
 * div[wd]eu[o][.] — extended divides.  The computation is done in the
 * helper; compute_ov is passed through so the helper can update OV for
 * the o-forms (see the helper implementation for details).
 */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);
1787 
1788 #if defined(TARGET_PPC64)
/*
 * 64-bit divide: ret = arg1 / arg2, signed if "sign".  Same scheme as
 * gen_op_arith_divw: invalid cases (divide by zero; INT64_MIN / -1
 * when signed) are flagged in t2, the divisor is replaced with the
 * flag value (1) so the host never traps, and OV mirrors t2.
 */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        /* t2 = (t0 == INT64_MIN && t1 == -1) || (t1 == 0) */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* Invalid division: force divisor to 1 (= t2) to avoid a trap */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        /* t2 = (t1 == 0) */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        /* OV (and OV32 on ISA 3.00) reflects the invalid-division flag */
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
1830 
/* Doubleword divide generators; args are (name, opc3, sign, compute_ov). */
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu  divdu.  divduo  divduo.   */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd  divd.  divdo  divdo.   */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

/* divdeu[o][.]  divde[o][.] : divide doubleword extended (un)signed */
GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif
1850 
/*
 * 32-bit modulo: ret = arg1 % arg2 (sign selects signed/unsigned),
 * result sign- or zero-extended to target width.  As in the divide
 * generators, an invalid operation (divisor == 0, or INT_MIN % -1 for
 * the signed form) forces the divisor to 1 so the TCG op cannot trap;
 * no overflow flags are set (mod has no OE form).
 */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        /* t2 = (arg1 == INT_MIN && arg2 == -1) || arg2 == 0 */
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        /* Force divisor to 1 in the invalid cases. */
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        /* Divisor 0 is replaced with 1 to avoid a trapping remu. */
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}
1885 
/* Word modulo generators; args are (name, opc3, sign). */
#define GEN_INT_ARITH_MODW(name, opc3, sign)                                \
static void glue(gen_, name)(DisasContext *ctx)                             \
{                                                                           \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                        \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
                      sign);                                                \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);
1896 
#if defined(TARGET_PPC64)
/*
 * 64-bit modulo: ret = arg1 % arg2 (sign selects signed/unsigned).
 * Invalid operations (divisor == 0, or INT64_MIN % -1 signed) force
 * the divisor to 1 so the TCG rem op cannot trap; no flags are set.
 */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* Copy operands so ret may alias arg1/arg2. */
    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        /* t2 = (arg1 == INT64_MIN && arg2 == -1) || arg2 == 0 */
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        /* Force divisor to 1 in the invalid cases. */
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        /* Divisor 0 is replaced with 1 to avoid a trapping remu. */
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}
1930 
1931 #define GEN_INT_ARITH_MODD(name, opc3, sign)                            \
1932 static void glue(gen_, name)(DisasContext *ctx)                           \
1933 {                                                                         \
1934   gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                        \
1935                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],   \
1936                     sign);                                                \
1937 }
1938 
1939 GEN_INT_ARITH_MODD(modud, 0x08, 0);
1940 GEN_INT_ARITH_MODD(modsd, 0x18, 1);
1941 #endif
1942 
1943 /* mulhw  mulhw. */
1944 static void gen_mulhw(DisasContext *ctx)
1945 {
1946     TCGv_i32 t0 = tcg_temp_new_i32();
1947     TCGv_i32 t1 = tcg_temp_new_i32();
1948 
1949     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1950     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1951     tcg_gen_muls2_i32(t0, t1, t0, t1);
1952     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1953     tcg_temp_free_i32(t0);
1954     tcg_temp_free_i32(t1);
1955     if (unlikely(Rc(ctx->opcode) != 0)) {
1956         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1957     }
1958 }
1959 
1960 /* mulhwu  mulhwu.  */
1961 static void gen_mulhwu(DisasContext *ctx)
1962 {
1963     TCGv_i32 t0 = tcg_temp_new_i32();
1964     TCGv_i32 t1 = tcg_temp_new_i32();
1965 
1966     tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
1967     tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
1968     tcg_gen_mulu2_i32(t0, t1, t0, t1);
1969     tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
1970     tcg_temp_free_i32(t0);
1971     tcg_temp_free_i32(t1);
1972     if (unlikely(Rc(ctx->opcode) != 0)) {
1973         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
1974     }
1975 }
1976 
/* mullw  mullw. : rD = low-order product of rA and rB */
static void gen_mullw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    /*
     * In 64-bit mode the full 64-bit product of the sign-extended
     * low words is written to rD.
     */
    TCGv_i64 t0, t1;
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
#endif
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
1997 
/* mullwo  mullwo. : mullw with OV set when the product overflows 32 bits */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    /* t0 = low half, t1 = high half of the signed 32x32 product. */
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    /* 64-bit mode: rD receives the full 64-bit product. */
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    /* OV iff the high half is not the sign-extension of the low half. */
    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
2027 
2028 /* mulli */
2029 static void gen_mulli(DisasContext *ctx)
2030 {
2031     tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2032                     SIMM(ctx->opcode));
2033 }
2034 
#if defined(TARGET_PPC64)
/* mulhd  mulhd. : rD = high 64 bits of signed 64x64 multiply of rA, rB */
static void gen_mulhd(DisasContext *ctx)
{
    /* The low half of the product is discarded. */
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
2047 
2048 /* mulhdu  mulhdu. */
2049 static void gen_mulhdu(DisasContext *ctx)
2050 {
2051     TCGv lo = tcg_temp_new();
2052     tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
2053                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2054     tcg_temp_free(lo);
2055     if (unlikely(Rc(ctx->opcode) != 0)) {
2056         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2057     }
2058 }
2059 
2060 /* mulld  mulld. */
2061 static void gen_mulld(DisasContext *ctx)
2062 {
2063     tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2064                    cpu_gpr[rB(ctx->opcode)]);
2065     if (unlikely(Rc(ctx->opcode) != 0)) {
2066         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2067     }
2068 }
2069 
/* mulldo  mulldo. : mulld with OV set when the product overflows 64 bits */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* t0 = low half, t1 = high half of the signed 64x64 product. */
    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    /* OV iff the high half is not the sign-extension of the low half. */
    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif
2095 
/* Common subf function */
/*
 * ret = ~arg1 + arg2 [+ CA | + 1]  (i.e. arg2 - arg1 with carry-in)
 *   add_ca:      use CA as carry-in (subfe family); otherwise +1.
 *   compute_ca:  update CA (and CA32 on ISA 3.00).
 *   compute_ov:  update OV/OV32 and accumulate into SO.
 *   compute_rc0: set CR0 from the result (Rc forms).
 */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    /* Work in a temp so the flag computations can still read arg1/arg2. */
    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv inv1 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            if (add_ca) {
                tcg_gen_add_tl(t0, arg2, cpu_ca);
            } else {
                tcg_gen_addi_tl(t0, arg2, 1);
            }
            tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
            tcg_gen_add_tl(t0, t0, inv1);
            tcg_temp_free(inv1);
            tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changes w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(cpu_ca32, cpu_ca);
            }
        } else if (add_ca) {
            /* Full-width add with carry via two add2 steps. */
            TCGv zero, inv1 = tcg_temp_new();
            tcg_gen_not_tl(inv1, arg1);
            zero = tcg_const_tl(0);
            tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
            tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
            gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
            tcg_temp_free(zero);
            tcg_temp_free(inv1);
        } else {
            /* No carry-in: CA is simply (arg2 >= arg1) unsigned. */
            tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
            tcg_gen_sub_tl(t0, arg2, arg1);
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
        }
    } else if (add_ca) {
        /*
         * Since we're ignoring carry-out, we can simplify the
         * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
         */
        tcg_gen_sub_tl(t0, arg2, arg1);
        tcg_gen_add_tl(t0, t0, cpu_ca);
        tcg_gen_subi_tl(t0, t0, 1);
    } else {
        tcg_gen_sub_tl(t0, arg2, arg1);
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Sub functions with Two operands functions */
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
}
/* Sub functions with one operand and one immediate */
/* (const_val replaces the rB operand, e.g. -1 for subfme, 0 for subfze) */
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], t0,                           \
                      add_ca, compute_ca, compute_ov, Rc(ctx->opcode));       \
    tcg_temp_free(t0);                                                        \
}
/* subf  subf.  subfo  subfo. */
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
/* subfc  subfc.  subfco  subfco. */
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
/* subfe  subfe.  subfeo  subfeo. */
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
/* subfme  subfme.  subfmeo  subfmeo.  */
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
/* subfze  subfze.  subfzeo  subfzeo.*/
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
2204 
2205 /* subfic */
2206 static void gen_subfic(DisasContext *ctx)
2207 {
2208     TCGv c = tcg_const_tl(SIMM(ctx->opcode));
2209     gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
2210                       c, 0, 1, 0, 0);
2211     tcg_temp_free(c);
2212 }
2213 
/* neg neg. nego nego. */
/* Negate via the common subf path (0 - rA) so OV handling is shared. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}
2222 
2223 static void gen_neg(DisasContext *ctx)
2224 {
2225     tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2226     if (unlikely(Rc(ctx->opcode))) {
2227         gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
2228     }
2229 }
2230 
/* nego nego. : negate with OV tracking */
static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}
2235 
2236 /***                            Integer logical                            ***/
2237 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
2238 static void glue(gen_, name)(DisasContext *ctx)                               \
2239 {                                                                             \
2240     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
2241        cpu_gpr[rB(ctx->opcode)]);                                             \
2242     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2243         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2244 }
2245 
2246 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
2247 static void glue(gen_, name)(DisasContext *ctx)                               \
2248 {                                                                             \
2249     tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
2250     if (unlikely(Rc(ctx->opcode) != 0))                                       \
2251         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
2252 }
2253 
2254 /* and & and. */
2255 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
2256 /* andc & andc. */
2257 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);
2258 
2259 /* andi. */
2260 static void gen_andi_(DisasContext *ctx)
2261 {
2262     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2263                     UIMM(ctx->opcode));
2264     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2265 }
2266 
2267 /* andis. */
2268 static void gen_andis_(DisasContext *ctx)
2269 {
2270     tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2271                     UIMM(ctx->opcode) << 16);
2272     gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2273 }
2274 
2275 /* cntlzw */
2276 static void gen_cntlzw(DisasContext *ctx)
2277 {
2278     TCGv_i32 t = tcg_temp_new_i32();
2279 
2280     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2281     tcg_gen_clzi_i32(t, t, 32);
2282     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2283     tcg_temp_free_i32(t);
2284 
2285     if (unlikely(Rc(ctx->opcode) != 0)) {
2286         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2287     }
2288 }
2289 
2290 /* cnttzw */
2291 static void gen_cnttzw(DisasContext *ctx)
2292 {
2293     TCGv_i32 t = tcg_temp_new_i32();
2294 
2295     tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
2296     tcg_gen_ctzi_i32(t, t, 32);
2297     tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
2298     tcg_temp_free_i32(t);
2299 
2300     if (unlikely(Rc(ctx->opcode) != 0)) {
2301         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2302     }
2303 }
2304 
/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. : sign-extend byte */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. : sign-extend halfword */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor. */
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);
2315 
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/* Yield the vCPU: write CPUState.halted and exit via EXCP_HLT. */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    /* cpu_env points at env inside PowerPCCPU; step back to CPUState. */
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
2328 
/* or & or. */
/*
 * Besides the plain OR, "or rx,rx,rx" encodings are architected hints:
 * on PPC64 they adjust the thread priority in SPR_PPR, and in softmmu
 * builds additionally pause translation so spin loops do not hog the
 * host.
 */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb) {
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        } else {
            /* rs == rb: this is just a register move (mr). */
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        }
        if (unlikely(Rc(ctx->opcode) != 0)) {
            gen_set_Rc0(ctx, cpu_gpr[ra]);
        }
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        /* Map the hint register number to a PPR priority value. */
        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            /* Replace the PPR priority field (bits 52:50). */
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /*
         * Pause out of TCG otherwise spin loops with smt_low eat too
         * much CPU and the kernel hangs.  This applies to all
         * encodings other than no-op, e.g., miso(rs=26), yield(27),
         * mdoio(29), mdoom(30), and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. : rA = rS | ~rB */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);
2417 
2418 /* xor & xor. */
2419 static void gen_xor(DisasContext *ctx)
2420 {
2421     /* Optimisation for "set to zero" case */
2422     if (rS(ctx->opcode) != rB(ctx->opcode)) {
2423         tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
2424                        cpu_gpr[rB(ctx->opcode)]);
2425     } else {
2426         tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
2427     }
2428     if (unlikely(Rc(ctx->opcode) != 0)) {
2429         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2430     }
2431 }
2432 
2433 /* ori */
2434 static void gen_ori(DisasContext *ctx)
2435 {
2436     target_ulong uimm = UIMM(ctx->opcode);
2437 
2438     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2439         return;
2440     }
2441     tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2442 }
2443 
/* oris : rA = rS | (UIMM << 16) */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}
2456 
2457 /* xori */
2458 static void gen_xori(DisasContext *ctx)
2459 {
2460     target_ulong uimm = UIMM(ctx->opcode);
2461 
2462     if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
2463         /* NOP */
2464         return;
2465     }
2466     tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
2467 }
2468 
/* xoris : rA = rS ^ (UIMM << 16) */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}
2481 
2482 /* popcntb : PowerPC 2.03 specification */
2483 static void gen_popcntb(DisasContext *ctx)
2484 {
2485     gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
2486 }
2487 
/* popcntw : per-word population count */
static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    /* Two 32-bit counts in one 64-bit register: use the helper. */
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    /* 32-bit target: a plain ctpop suffices. */
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}
2496 
#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif
2504 
/* prtyw: PowerPC 2.05 specification */
/* XOR-fold each word of rS down to its parity in bit 0 of that word. */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    /* Keep bit 0 of each word (mask covers both words on 64-bit). */
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}
2518 
#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification */
/* XOR-fold the full doubleword of rS down to its parity in bit 0. */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif
2536 
#if defined(TARGET_PPC64)
/* bpermd : bit permute doubleword, delegated to the helper */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif
2545 
#if defined(TARGET_PPC64)
/* extsw & extsw. : sign-extend word */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);
2549 
2550 /* cntlzd */
2551 static void gen_cntlzd(DisasContext *ctx)
2552 {
2553     tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2554     if (unlikely(Rc(ctx->opcode) != 0)) {
2555         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2556     }
2557 }
2558 
2559 /* cnttzd */
2560 static void gen_cnttzd(DisasContext *ctx)
2561 {
2562     tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
2563     if (unlikely(Rc(ctx->opcode) != 0)) {
2564         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2565     }
2566 }
2567 
/* darn : deliver a random number; L field selects the format */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l > 2) {
        /* Reserved L values return the error indication (-1). */
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    } else {
        gen_icount_io_start(ctx);
        if (l == 0) {
            gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
        } else {
            /* Return 64-bit random for both CRN and RRN */
            gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
        }
    }
}
#endif
2585 #endif
2586 
2587 /***                             Integer rotate                            ***/
2588 
2589 /* rlwimi & rlwimi. */
2590 static void gen_rlwimi(DisasContext *ctx)
2591 {
2592     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2593     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2594     uint32_t sh = SH(ctx->opcode);
2595     uint32_t mb = MB(ctx->opcode);
2596     uint32_t me = ME(ctx->opcode);
2597 
2598     if (sh == (31 - me) && mb <= me) {
2599         tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2600     } else {
2601         target_ulong mask;
2602         bool mask_in_32b = true;
2603         TCGv t1;
2604 
2605 #if defined(TARGET_PPC64)
2606         mb += 32;
2607         me += 32;
2608 #endif
2609         mask = MASK(mb, me);
2610 
2611 #if defined(TARGET_PPC64)
2612         if (mask > 0xffffffffu) {
2613             mask_in_32b = false;
2614         }
2615 #endif
2616         t1 = tcg_temp_new();
2617         if (mask_in_32b) {
2618             TCGv_i32 t0 = tcg_temp_new_i32();
2619             tcg_gen_trunc_tl_i32(t0, t_rs);
2620             tcg_gen_rotli_i32(t0, t0, sh);
2621             tcg_gen_extu_i32_tl(t1, t0);
2622             tcg_temp_free_i32(t0);
2623         } else {
2624 #if defined(TARGET_PPC64)
2625             tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2626             tcg_gen_rotli_i64(t1, t1, sh);
2627 #else
2628             g_assert_not_reached();
2629 #endif
2630         }
2631 
2632         tcg_gen_andi_tl(t1, t1, mask);
2633         tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2634         tcg_gen_or_tl(t_ra, t_ra, t1);
2635         tcg_temp_free(t1);
2636     }
2637     if (unlikely(Rc(ctx->opcode) != 0)) {
2638         gen_set_Rc0(ctx, t_ra);
2639     }
2640 }
2641 
/* rlwinm & rlwinm. */
/*
 * Rotate-left-word-immediate-then-AND-with-mask:
 * rA = rotl32(rS, SH) & MASK(MB, ME).
 * Shift-and-mask shapes map onto deposit/extract; the general case
 * emulates the 32-bit rotate as in gen_rlwimi.
 */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        /* Left shift into a zeroed field. */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        /* Right shift / bitfield extract. */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
        bool mask_in_32b = true;
#if defined(TARGET_PPC64)
        /* MASK() indexes bits from the MSB; adjust for 64-bit width. */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
#if defined(TARGET_PPC64)
        if (mask > 0xffffffffu) {
            mask_in_32b = false;
        }
#endif
        if (mask_in_32b) {
            if (sh == 0) {
                tcg_gen_andi_tl(t_ra, t_rs, mask);
            } else {
                TCGv_i32 t0 = tcg_temp_new_i32();
                tcg_gen_trunc_tl_i32(t0, t_rs);
                tcg_gen_rotli_i32(t0, t0, sh);
                tcg_gen_andi_i32(t0, t0, mask);
                tcg_gen_extu_i32_tl(t_ra, t0);
                tcg_temp_free_i32(t0);
            }
        } else {
#if defined(TARGET_PPC64)
            /* Replicate the word so a 64-bit rotate acts like rotl32. */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2695 
/* rlwnm & rlwnm. */
static void gen_rlwnm(DisasContext *ctx)
{
    /*
     * Rotate Left Word then AND with Mask:
     * rA = ROTL32(rS, rB & 0x1f) & MASK(mb, me).
     */
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;
    bool mask_in_32b = true;

#if defined(TARGET_PPC64)
    /* MASK() numbers bits over 64; the 32-bit mask lives in the low word. */
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

#if defined(TARGET_PPC64)
    if (mask > 0xffffffffu) {
        /* mb > me wrapped the mask into the high word. */
        mask_in_32b = false;
    }
#endif
    if (mask_in_32b) {
        /* Rotate in 32 bits, then zero-extend to the target width. */
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        /* Replicate the low word so a 64-bit rotate matches ROTL32. */
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2746 
2747 #if defined(TARGET_PPC64)
/*
 * Emit two decoder entry points gen_<name>0/gen_<name>1 that forward to
 * gen_<name>() with the high bit of the split 6-bit field as argument.
 */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
/*
 * Emit four decoder entry points gen_<name>0..gen_<name>3 that forward to
 * gen_<name>() with the high bits of the two split fields as arguments.
 */
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}
2778 
/*
 * Common body for the 64-bit rotate-immediate instructions:
 * rA = ROTL64(rS, sh) & MASK(mb, me), using a single deposit or extract
 * op when the rotate+mask pattern allows it.
 */
static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
        /* Mask ends exactly at the rotate amount: shift into zeros. */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 63 && rsh + len <= 64) {
        /* Right-aligned mask: plain bitfield extract. */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        tcg_gen_rotli_tl(t_ra, t_rs, sh);
        tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
2798 
/* rldicl - rldicl. */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    /* shn/mbn are the high (bit 5) halves of the split 6-bit SH/MB fields. */
    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);
2809 
/* rldicr - rldicr. */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    /* MD-form encodes mb and me in the same field, hence MB() for me here. */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2820 
/* rldic - rldic. */
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    /* me is implied: mask always ends at bit 63 - sh. */
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);
2831 
2832 static void gen_rldnm(DisasContext *ctx, int mb, int me)
2833 {
2834     TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2835     TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2836     TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2837     TCGv t0;
2838 
2839     t0 = tcg_temp_new();
2840     tcg_gen_andi_tl(t0, t_rb, 0x3f);
2841     tcg_gen_rotl_tl(t_ra, t_rs, t0);
2842     tcg_temp_free(t0);
2843 
2844     tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2845     if (unlikely(Rc(ctx->opcode) != 0)) {
2846         gen_set_Rc0(ctx, t_ra);
2847     }
2848 }
2849 
2850 /* rldcl - rldcl. */
2851 static inline void gen_rldcl(DisasContext *ctx, int mbn)
2852 {
2853     uint32_t mb;
2854 
2855     mb = MB(ctx->opcode) | (mbn << 5);
2856     gen_rldnm(ctx, mb, 63);
2857 }
2858 GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2859 
2860 /* rldcr - rldcr. */
2861 static inline void gen_rldcr(DisasContext *ctx, int men)
2862 {
2863     uint32_t me;
2864 
2865     me = MB(ctx->opcode) | (men << 5);
2866     gen_rldnm(ctx, 0, me);
2867 }
2868 GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2869 
/* rldimi - rldimi. */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    /*
     * Rotate Left Doubleword Immediate then Mask Insert:
     * rA = (ROTL64(rS, sh) & MASK(mb, 63 - sh)) | (rA & ~MASK(mb, 63 - sh)).
     */
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;

    if (mb <= me) {
        /* Contiguous mask: a single deposit implements the insert. */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        /* Wrapped mask: rotate, mask, and merge by hand. */
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2896 #endif
2897 
2898 /***                             Integer shift                             ***/
2899 
/* slw & slw. */
static void gen_slw(DisasContext *ctx)
{
    /*
     * Shift Left Word: rA = (rS << (rB & 0x1f)) & 0xffffffff, or 0 when
     * rB's bit 5 (0x20) is set.
     */
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* Move rB bit 5 into the sign bit, then replicate it across t0. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    /* The result is a word quantity: clear the high 32 bits on ppc64. */
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
2925 
2926 /* sraw & sraw. */
2927 static void gen_sraw(DisasContext *ctx)
2928 {
2929     gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
2930                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2931     if (unlikely(Rc(ctx->opcode) != 0)) {
2932         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2933     }
2934 }
2935 
/* srawi & srawi. */
static void gen_srawi(DisasContext *ctx)
{
    /*
     * Shift Right Algebraic Word Immediate.  CA (and CA32 on ISA 3.00)
     * is set iff the source is negative and any 1-bits were shifted out.
     */
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* No shift: just sign-extend the word; no bits can be lost. */
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        /* Collect the bits about to be shifted out... */
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        /* ...and keep them only if the (sign-extended) value is negative. */
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        /* Reduce to a 0/1 carry flag. */
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
2966 
/* srw & srw. */
static void gen_srw(DisasContext *ctx)
{
    /*
     * Shift Right Word: rA = (rS & 0xffffffff) >> (rB & 0x1f), or 0 when
     * rB's bit 5 (0x20) is set.
     */
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* Move rB bit 5 into the sign bit, then replicate it across t0. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* Logical word shift: clear the high bits before shifting right. */
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
2992 
2993 #if defined(TARGET_PPC64)
/* sld & sld. */
static void gen_sld(DisasContext *ctx)
{
    /*
     * Shift Left Doubleword: rA = rS << (rB & 0x3f), or 0 when rB's
     * bit 6 (0x40) is set.
     */
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    /* Move rB bit 6 into the sign bit, then replicate it across t0. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
3013 
3014 /* srad & srad. */
3015 static void gen_srad(DisasContext *ctx)
3016 {
3017     gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
3018                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3019     if (unlikely(Rc(ctx->opcode) != 0)) {
3020         gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
3021     }
3022 }
/* sradi & sradi. */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    /*
     * Shift Right Algebraic Doubleword Immediate.  n is the high bit of
     * the split 6-bit SH field.  CA (and CA32 on ISA 3.00) is set iff
     * the source is negative and any 1-bits were shifted out.
     */
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* No shift: copy through; no bits can be lost. */
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        /* Collect the bits about to be shifted out... */
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        /* ...and keep them only if the value is negative. */
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        /* Reduce to a 0/1 carry flag. */
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
3052 
/* Decoder entry points for the two encodings of the split SH field. */
static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}
3062 
/* extswsli & extswsli. */
static inline void gen_extswsli(DisasContext *ctx, int n)
{
    /*
     * Extend Sign Word and Shift Left Immediate:
     * rA = EXTS32(rS) << sh.  n is the high bit of the split SH field.
     */
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ext32s_tl(dst, src);
    tcg_gen_shli_tl(dst, dst, sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}
3076 
/* Decoder entry points for the two encodings of the split SH field. */
static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}

static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}
3086 
/* srd & srd. */
static void gen_srd(DisasContext *ctx)
{
    /*
     * Shift Right Doubleword: rA = rS >> (rB & 0x3f), or 0 when rB's
     * bit 6 (0x40) is set.
     */
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    /* Move rB bit 6 into the sign bit, then replicate it across t0. */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
3106 #endif
3107 
3108 /***                           Addressing modes                            ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    /* maskl clears low displacement bits for DS/DQ-form encodings. */
    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            /* 32-bit mode: addresses wrap at 4 GiB. */
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        /* Zero displacement: EA is just (possibly truncated) rA. */
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}
3134 
3135 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
3136 {
3137     if (rA(ctx->opcode) == 0) {
3138         if (NARROW_MODE(ctx)) {
3139             tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3140         } else {
3141             tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
3142         }
3143     } else {
3144         tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
3145         if (NARROW_MODE(ctx)) {
3146             tcg_gen_ext32u_tl(EA, EA);
3147         }
3148     }
3149 }
3150 
3151 static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
3152 {
3153     if (rA(ctx->opcode) == 0) {
3154         tcg_gen_movi_tl(EA, 0);
3155     } else if (NARROW_MODE(ctx)) {
3156         tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3157     } else {
3158         tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
3159     }
3160 }
3161 
3162 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
3163                                 target_long val)
3164 {
3165     tcg_gen_addi_tl(ret, arg1, val);
3166     if (NARROW_MODE(ctx)) {
3167         tcg_gen_ext32u_tl(ret, ret);
3168     }
3169 }
3170 
/*
 * Raise an alignment interrupt for instructions that are invalid in
 * little-endian mode, recording the opcode's register fields in the
 * error code.
 */
static inline void gen_align_no_le(DisasContext *ctx)
{
    gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
                      (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
}
3176 
3177 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
3178 {
3179     TCGv ea = tcg_temp_new();
3180     if (ra) {
3181         tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
3182     } else {
3183         tcg_gen_mov_tl(ea, displ);
3184     }
3185     if (NARROW_MODE(ctx)) {
3186         tcg_gen_ext32u_tl(ea, ea);
3187     }
3188     return ea;
3189 }
3190 
3191 /***                             Integer load                              ***/
/* Memop in the guest's current default byte order. */
#define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
/* Memop with byte order opposite to the guest's current default. */
#define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))

/* Emit gen_qemu_<ldop>(): load a target_long-sized value with memop 'op'. */
#define GEN_QEMU_LOAD_TL(ldop, op)                                      \
static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
}
3202 
/* Default-byte-order loads of 8/16/32 bits, zero- or sign-extending. */
GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))

/* Byte-reversed loads, for the lhbrx/lwbrx family. */
GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
3211 
/* Emit gen_qemu_<ldop>_i64(): load into a 64-bit value with memop 'op'. */
#define GEN_QEMU_LOAD_64(ldop, op)                                  \
static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
                                             TCGv_i64 val,          \
                                             TCGv addr)             \
{                                                                   \
    tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
}

GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
/* Byte-reversed 64-bit load, for ldbrx. */
GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
#endif
3229 
/* Emit gen_qemu_<stop>(): store a target_long-sized value with memop 'op'. */
#define GEN_QEMU_STORE_TL(stop, op)                                     \
static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
                                  TCGv val,                             \
                                  TCGv addr)                            \
{                                                                       \
    tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
}

#if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
#endif
GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))

/* Byte-reversed stores, for the sthbrx/stwbrx family. */
GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
3246 
/* Emit gen_qemu_<stop>_i64(): store a 64-bit value with memop 'op'. */
#define GEN_QEMU_STORE_64(stop, op)                               \
static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
                                              TCGv_i64 val,       \
                                              TCGv addr)          \
{                                                                 \
    tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
}

GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
/* Byte-reversed 64-bit store, for stdbrx. */
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
#endif
3263 
/*
 * Emit gen_<name>x(): X-form indexed load of rD from EA = (rA|0) + rB,
 * after running the privilege check 'chk'.
 */
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    chk(ctx);                                                                 \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}

#define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* Hypervisor-real-mode-only variant (cache-inhibited load/store insns). */
#define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3281 
/*
 * Emit gen_<name>epx(): external-process-ID load (privileged), using the
 * dedicated EPID-load MMU index instead of ctx->mem_idx.
 */
#define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
static void glue(gen_, name##epx)(DisasContext *ctx)                          \
{                                                                             \
    TCGv EA;                                                                  \
    CHK_SV(ctx);                                                              \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
    tcg_temp_free(EA);                                                        \
}

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
#endif
3300 
#if defined(TARGET_PPC64)
/* CI load/store variants */
/* Cache-inhibited loads; restricted to hypervisor real mode. */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
3308 
/***                              Integer store                            ***/
/*
 * Emit gen_<name>x(): X-form indexed store of rS to EA = (rA|0) + rB,
 * after running the privilege check 'chk'.
 */
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
static void glue(gen_, name##x)(DisasContext *ctx)                            \
{                                                                             \
    TCGv EA;                                                                  \
    chk(ctx);                                                                 \
    gen_set_access_type(ctx, ACCESS_INT);                                     \
    EA = tcg_temp_new();                                                      \
    gen_addr_reg_index(ctx, EA);                                              \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
    tcg_temp_free(EA);                                                        \
}
#define GEN_STX(name, stop, opc2, opc3, type)                                 \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* Hypervisor-real-mode-only variant (cache-inhibited load/store insns). */
#define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
3326 
3327 #define GEN_STEPX(name, stop, opc2, opc3)                                     \
3328 static void glue(gen_, name##epx)(DisasContext *ctx)                          \
3329 {                                                                             \
3330     TCGv EA;                                                                  \
3331     CHK_SV(ctx);                                                              \
3332     gen_set_access_type(ctx, ACCESS_INT);                                     \
3333     EA = tcg_temp_new();                                                      \
3334     gen_addr_reg_index(ctx, EA);                                              \
3335     tcg_gen_qemu_st_tl(                                                       \
3336         cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
3337     tcg_temp_free(EA);                                                        \
3338 }
3339 
3340 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
3341 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
3342 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
3343 #if defined(TARGET_PPC64)
3344 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
3345 #endif
3346 
#if defined(TARGET_PPC64)
/* Cache-inhibited stores; restricted to hypervisor real mode. */
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
/***                Integer load and store with byte reverse               ***/

/* lhbrx */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif  /* TARGET_PPC64 */

/* sthbrx */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
3372 
3373 /***                    Integer load and store multiple                    ***/
3374 
3375 /* lmw */
3376 static void gen_lmw(DisasContext *ctx)
3377 {
3378     TCGv t0;
3379     TCGv_i32 t1;
3380 
3381     if (ctx->le_mode) {
3382         gen_align_no_le(ctx);
3383         return;
3384     }
3385     gen_set_access_type(ctx, ACCESS_INT);
3386     t0 = tcg_temp_new();
3387     t1 = tcg_const_i32(rD(ctx->opcode));
3388     gen_addr_imm_index(ctx, t0, 0);
3389     gen_helper_lmw(cpu_env, t0, t1);
3390     tcg_temp_free(t0);
3391     tcg_temp_free_i32(t1);
3392 }
3393 
3394 /* stmw */
3395 static void gen_stmw(DisasContext *ctx)
3396 {
3397     TCGv t0;
3398     TCGv_i32 t1;
3399 
3400     if (ctx->le_mode) {
3401         gen_align_no_le(ctx);
3402         return;
3403     }
3404     gen_set_access_type(ctx, ACCESS_INT);
3405     t0 = tcg_temp_new();
3406     t1 = tcg_const_i32(rS(ctx->opcode));
3407     gen_addr_imm_index(ctx, t0, 0);
3408     gen_helper_stmw(cpu_env, t0, t1);
3409     tcg_temp_free(t0);
3410     tcg_temp_free_i32(t1);
3411 }
3412 
3413 /***                    Integer load and store strings                     ***/
3414 
/* lswi */
/*
 * The PowerPC32 specification says we must generate an exception if rA is
 * in the range of registers to be loaded.  On the other hand, IBM says
 * this is valid, but rA won't be loaded.  For now, I'll follow the
 * spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    int start = rD(ctx->opcode);
    int ra = rA(ctx->opcode);
    int nr;

    /* String insns are invalid in little-endian mode. */
    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    /* NB == 0 encodes a 32-byte transfer. */
    if (nb == 0) {
        nb = 32;
    }
    nr = DIV_ROUND_UP(nb, 4);
    /* Reject the case where rA falls in the loaded register range. */
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
3453 
3454 /* lswx */
3455 static void gen_lswx(DisasContext *ctx)
3456 {
3457     TCGv t0;
3458     TCGv_i32 t1, t2, t3;
3459 
3460     if (ctx->le_mode) {
3461         gen_align_no_le(ctx);
3462         return;
3463     }
3464     gen_set_access_type(ctx, ACCESS_INT);
3465     t0 = tcg_temp_new();
3466     gen_addr_reg_index(ctx, t0);
3467     t1 = tcg_const_i32(rD(ctx->opcode));
3468     t2 = tcg_const_i32(rA(ctx->opcode));
3469     t3 = tcg_const_i32(rB(ctx->opcode));
3470     gen_helper_lswx(cpu_env, t0, t1, t2, t3);
3471     tcg_temp_free(t0);
3472     tcg_temp_free_i32(t1);
3473     tcg_temp_free_i32(t2);
3474     tcg_temp_free_i32(t3);
3475 }
3476 
3477 /* stswi */
3478 static void gen_stswi(DisasContext *ctx)
3479 {
3480     TCGv t0;
3481     TCGv_i32 t1, t2;
3482     int nb = NB(ctx->opcode);
3483 
3484     if (ctx->le_mode) {
3485         gen_align_no_le(ctx);
3486         return;
3487     }
3488     gen_set_access_type(ctx, ACCESS_INT);
3489     t0 = tcg_temp_new();
3490     gen_addr_register(ctx, t0);
3491     if (nb == 0) {
3492         nb = 32;
3493     }
3494     t1 = tcg_const_i32(nb);
3495     t2 = tcg_const_i32(rS(ctx->opcode));
3496     gen_helper_stsw(cpu_env, t0, t1, t2);
3497     tcg_temp_free(t0);
3498     tcg_temp_free_i32(t1);
3499     tcg_temp_free_i32(t2);
3500 }
3501 
/* stswx */
static void gen_stswx(DisasContext *ctx)
{
    /* Store string indexed: the byte count is XER[25:31] (low 7 bits). */
    TCGv t0;
    TCGv_i32 t1, t2;

    /* String insns are invalid in little-endian mode. */
    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    /* Extract the 7-bit byte count from XER. */
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}
3524 
3525 /***                        Memory synchronisation                         ***/
/* eieio */
static void gen_eieio(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_ALL;

    /*
     * eieio has complex semantics. It provides memory ordering between
     * operations in the set:
     * - loads from CI memory.
     * - stores to CI memory.
     * - stores to WT memory.
     *
     * It separately also orders memory for operations in the set:
     * - stores to cacheable memory.
     *
     * It also serializes instructions:
     * - dcbt and dcbst.
     *
     * It separately serializes:
     * - tlbie and tlbsync.
     *
     * And separately serializes:
     * - slbieg, slbiag, and slbsync.
     *
     * The end result is that CI memory ordering requires TCG_MO_ALL
     * and it is not possible to special-case more relaxed ordering for
     * cacheable accesses. TCG_BAR_SC is required to provide this
     * serialization.
     */

    /*
     * POWER9 has an eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * ISA says that "Reserved fields in instructions are ignored
         * by the processor". So ignore the bit 6 on non-POWER9 CPU but
         * as this is not an instruction software should be using,
         * complain to the user.
         */
        if (!(ctx->insns_flags2 & PPC2_ISA300)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
                          TARGET_FMT_lx "\n", ctx->cia);
        } else {
            /* Store-forwarding barrier: only store-vs-load ordering. */
            bar = TCG_MO_ST_LD;
        }
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}
3577 
#if !defined(CONFIG_USER_ONLY)
/*
 * Emit a runtime check of env->tlb_need_flush; if it is non-zero, call
 * the global or local flush helper.  No-op for CPU models that do not
 * use lazy TLB flushing.
 */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
{
    TCGv_i32 t;
    TCGLabel *l;

    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    /* Skip the helper call entirely when no flush is pending. */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    if (global) {
        gen_helper_check_tlb_flush_global(cpu_env);
    } else {
        gen_helper_check_tlb_flush_local(cpu_env);
    }
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
/* User-mode emulation has no softmmu TLB to flush. */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
#endif
3602 
/* isync */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush. This can only happen in
     * kernel mode however so check MSR_PR
     */
    if (!ctx->pr) {
        gen_check_tlb_flush(ctx, false);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
    /* End the TB and reload state: isync is context synchronizing. */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
3616 
/* Access size in bytes (1, 2, 4 or 8) encoded in the MO_SIZE field of a MemOp. */
#define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))
3618 
/*
 * Common body of the load-and-reserve instructions (lbarx/lharx/lwarx/
 * ldarx): do an aligned load into RD and record the reservation address
 * and value for a later store-conditional.
 */
static void gen_load_locked(DisasContext *ctx, MemOp memop)
{
    TCGv gpr = cpu_gpr[rD(ctx->opcode)];
    TCGv t0 = tcg_temp_new();

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
    /* Remember address and loaded value for the matching stXcx. */
    tcg_gen_mov_tl(cpu_reserve, t0);
    tcg_gen_mov_tl(cpu_reserve_val, gpr);
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
    tcg_temp_free(t0);
}
3632 
/* Emit a gen_* wrapper for one load-and-reserve instruction. */
#define LARX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_load_locked(ctx, memop);           \
}

/* lbarx, lharx, lwarx */
LARX(lbarx, DEF_MEMOP(MO_UB))
LARX(lharx, DEF_MEMOP(MO_UW))
LARX(lwarx, DEF_MEMOP(MO_UL))
3643 
3644 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
3645                                       TCGv EA, TCGCond cond, int addend)
3646 {
3647     TCGv t = tcg_temp_new();
3648     TCGv t2 = tcg_temp_new();
3649     TCGv u = tcg_temp_new();
3650 
3651     tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3652     tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
3653     tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
3654     tcg_gen_addi_tl(u, t, addend);
3655 
3656     /* E.g. for fetch and increment bounded... */
3657     /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
3658     tcg_gen_movcond_tl(cond, u, t, t2, u, t);
3659     tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
3660 
3661     /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
3662     tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1));
3663     tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u);
3664 
3665     tcg_temp_free(t);
3666     tcg_temp_free(t2);
3667     tcg_temp_free(u);
3668 }
3669 
/*
 * Common body of lwat/ldat (ISA v3.0 atomic memory operations, load
 * side).  The FC field of the opcode selects the operation; the source
 * operand is in RT+1 (mod 32) and the old memory value is returned in
 * RT.  Function codes with no single TCG atomic equivalent fall back to
 * serial execution when translating for parallel context.
 */
static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    int rt = rD(ctx->opcode);
    bool need_serial;
    TCGv src, dst;

    gen_addr_register(ctx, EA);
    dst = cpu_gpr[rt];
    src = cpu_gpr[(rt + 1) & 31];

    need_serial = false;
    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* Fetch and add */
        tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* Fetch and xor */
        tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Fetch and or */
        tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* Fetch and 'and' */
        tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 4:  /* Fetch and max unsigned */
        tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 5:  /* Fetch and max signed */
        tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 6:  /* Fetch and min unsigned */
        tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 7:  /* Fetch and min signed */
        tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
        break;
    case 8: /* Swap */
        tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
        break;

    case 16: /* Compare and swap not equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
            /* For 32-bit operands compare only the low word. */
            if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
                tcg_gen_mov_tl(t1, src);
            } else {
                tcg_gen_ext32u_tl(t1, src);
            }
            /* Store RT+2 when mem != RT+1, else rewrite the old value. */
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
                               cpu_gpr[(rt + 2) & 31], t0);
            tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
            tcg_gen_mov_tl(dst, t0);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;

    case 24: /* Fetch and increment bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
        }
        break;
    case 25: /* Fetch and increment equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
        }
        break;
    case 28: /* Fetch and decrement bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
        }
        break;

    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(EA);

    if (need_serial) {
        /* Restart with exclusive lock.  */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
    }
}
3770 
/* lwat: word-sized atomic load-and-op. */
static void gen_lwat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
/* ldat: doubleword-sized atomic load-and-op. */
static void gen_ldat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif
3782 
/*
 * Common body of stwat/stdat (ISA v3.0 atomic memory operations, store
 * side).  The FC field of the opcode selects the operation; the fetched
 * result is discarded — only memory is updated.
 */
static void gen_st_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    TCGv src, discard;

    gen_addr_register(ctx, EA);
    src = cpu_gpr[rD(ctx->opcode)];
    /* TCG atomics always produce the old value; we throw it away. */
    discard = tcg_temp_new();

    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* add and Store */
        tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* xor and Store */
        tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Or and Store */
        tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* 'and' and Store */
        tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 4:  /* Store max unsigned */
        tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 5:  /* Store max signed */
        tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 6:  /* Store min unsigned */
        tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 7:  /* Store min signed */
        tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 24: /* Store twin  */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        } else {
            TCGv t = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv s = tcg_temp_new();
            TCGv s2 = tcg_temp_new();
            TCGv ea_plus_s = tcg_temp_new();

            /* Store src to both locations only if they hold equal values. */
            tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
            tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
            tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
            tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
            tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
            tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
            tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);

            tcg_temp_free(ea_plus_s);
            tcg_temp_free(s2);
            tcg_temp_free(s);
            tcg_temp_free(t2);
            tcg_temp_free(t);
        }
        break;
    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(discard);
    tcg_temp_free(EA);
}
3853 
/* stwat: word-sized atomic store-and-op. */
static void gen_stwat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
/* stdat: doubleword-sized atomic store-and-op. */
static void gen_stdat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif
3865 
/*
 * Common body of the store-conditional instructions (stbcx./sthcx./
 * stwcx./stdcx.): the store succeeds only if the EA matches the
 * recorded reservation address and memory still holds the value seen by
 * the reserving load.  CR0 is set to 0b00 || success || XER[SO], and the
 * reservation is cleared in all cases.
 */
static void gen_conditional_store(DisasContext *ctx, MemOp memop)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    int reg = rS(ctx->opcode);

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    /* EA must match the reservation address. */
    tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
    tcg_temp_free(t0);

    t0 = tcg_temp_new();
    /* Atomically store RS only if memory still equals the reserved value. */
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
                              cpu_gpr[reg], ctx->mem_idx,
                              DEF_MEMOP(memop) | MO_ALIGN);
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
    tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
    tcg_gen_or_tl(t0, t0, cpu_so);
    tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /*
     * Address mismatch implies failure.  But we still need to provide
     * the memory barrier semantics of the instruction.
     */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* Lose the reservation whether the store succeeded or not. */
    tcg_gen_movi_tl(cpu_reserve, -1);
}
3901 
/* Emit a gen_* wrapper for one store-conditional instruction. */
#define STCX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_conditional_store(ctx, memop);     \
}

/* stbcx. sthcx. stwcx. */
STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))

#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, DEF_MEMOP(MO_UQ))
/* stdcx. */
STCX(stdcx_, DEF_MEMOP(MO_UQ))
3917 
/*
 * lqarx: Load Quadword And Reserve Indexed.  Loads a 16-byte value into
 * the even/odd register pair RD/RD+1 and establishes a reservation;
 * both halves are also recorded in reserve_val/reserve_val2 for stqcx.
 */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    /* RD must be even and must not overlap RA or RB. */
    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode.  */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_ATOMIC128) {
            TCGv_i32 oi = tcg_temp_new_i32();
            if (ctx->le_mode) {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
            } else {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
            }
            tcg_temp_free_i32(oi);
            /* The helpers return the high half via env->retxh. */
            tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
            tcg_temp_free(EA);
            return;
        }
    } else if (ctx->le_mode) {
        /* Serial context: two 8-byte loads, 16-byte alignment checked once. */
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);

    tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
}
3975 
/*
 * stqcx.: Store Quadword Conditional Indexed.  Stores the even/odd pair
 * RS/RS+1 if the reservation from a preceding lqarx still holds; the
 * result is reported in CR0 and the reservation is cleared.
 */
static void gen_stqcx_(DisasContext *ctx)
{
    int rs = rS(ctx->opcode);
    TCGv EA, hi, lo;

    /* RS must be even. */
    if (unlikely(rs & 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RS+1, even in LE mode.  */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            /* The helpers set CR0 themselves. */
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock.  */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        /* EA must match the reservation address. */
        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        /* Both halves of memory must still match the reserved values. */
        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        /* Clear the reservation in all cases. */
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
4052 #endif /* defined(TARGET_PPC64) */
4053 
/* sync / lwsync / ptesync (distinguished by the L field). */
static void gen_sync(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_ALL;
    uint32_t l = (ctx->opcode >> 21) & 3;

    /* L == 1 is lwsync: everything except store-then-load ordering. */
    if ((l == 1) && (ctx->insns_flags2 & PPC2_MEM_LWSYNC)) {
        bar = TCG_MO_LD_LD | TCG_MO_LD_ST | TCG_MO_ST_ST;
    }

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
     */
    if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
        gen_check_tlb_flush(ctx, true);
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}
4078 
/*
 * wait: decode the WC (and, for v3.1, PL) fields across the several ISA
 * generations of the instruction, then either halt the CPU (WC=0) or
 * treat it as a no-op.
 */
static void gen_wait(DisasContext *ctx)
{
    uint32_t wc;

    if (ctx->insns_flags & PPC_WAIT) {
        /* v2.03-v2.07 define an older incompatible 'wait' encoding. */

        if (ctx->insns_flags2 & PPC2_PM_ISA206) {
            /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
            wc = WC(ctx->opcode);
        } else {
            wc = 0;
        }

    } else if (ctx->insns_flags2 & PPC2_ISA300) {
        /* v3.0 defines a new 'wait' encoding. */
        wc = WC(ctx->opcode);
        if (ctx->insns_flags2 & PPC2_ISA310) {
            uint32_t pl = PL(ctx->opcode);

            /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
            if (wc == 3) {
                gen_invalid(ctx);
                return;
            }

            /* PL 1-3 are reserved. If WC=2 then the insn is treated as noop. */
            if (pl > 0 && wc != 2) {
                gen_invalid(ctx);
                return;
            }

        } else { /* ISA300 */
            /* WC 1-3 are reserved */
            if (wc > 0) {
                gen_invalid(ctx);
                return;
            }
        }

    } else {
        warn_report("wait instruction decoded with wrong ISA flags.");
        gen_invalid(ctx);
        return;
    }

    /*
     * wait without WC field or with WC=0 waits for an exception / interrupt
     * to occur.
     */
    if (wc == 0) {
        /* Set cpu->halted directly in env; note the negative env offset. */
        TCGv_i32 t0 = tcg_const_i32(1);
        tcg_gen_st_i32(t0, cpu_env,
                       -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
        tcg_temp_free_i32(t0);
        /* Stop translation, as the CPU is supposed to sleep from now */
        gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
    }

    /*
     * Other wait types must not just wait until an exception occurs because
     * ignoring their other wake-up conditions could cause a hang.
     *
     * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
     * no-ops.
     *
     * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
     *
     * wc=2 waits for an implementation-specific condition, such could be
     * always true, so it can be implemented as a no-op.
     *
     * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
     *
     * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
     * Reservation-loss may have implementation-specific conditions, so it
     * can be implemented as a no-op.
     *
     * wc=2 waits for an exception or an amount of time to pass. This
     * amount is implementation-specific so it can be implemented as a
     * no-op.
     *
     * ISA v3.1 allows for execution to resume "in the rare case of
     * an implementation-dependent event", so in any case software must
     * not depend on the architected resumption condition to become
     * true, so no-op implementations should be architecturally correct
     * (if suboptimal).
     */
}
4168 
4169 #if defined(TARGET_PPC64)
4170 static void gen_doze(DisasContext *ctx)
4171 {
4172 #if defined(CONFIG_USER_ONLY)
4173     GEN_PRIV(ctx);
4174 #else
4175     TCGv_i32 t;
4176 
4177     CHK_HV(ctx);
4178     t = tcg_const_i32(PPC_PM_DOZE);
4179     gen_helper_pminsn(cpu_env, t);
4180     tcg_temp_free_i32(t);
4181     /* Stop translation, as the CPU is supposed to sleep from now */
4182     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4183 #endif /* defined(CONFIG_USER_ONLY) */
4184 }
4185 
4186 static void gen_nap(DisasContext *ctx)
4187 {
4188 #if defined(CONFIG_USER_ONLY)
4189     GEN_PRIV(ctx);
4190 #else
4191     TCGv_i32 t;
4192 
4193     CHK_HV(ctx);
4194     t = tcg_const_i32(PPC_PM_NAP);
4195     gen_helper_pminsn(cpu_env, t);
4196     tcg_temp_free_i32(t);
4197     /* Stop translation, as the CPU is supposed to sleep from now */
4198     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4199 #endif /* defined(CONFIG_USER_ONLY) */
4200 }
4201 
4202 static void gen_stop(DisasContext *ctx)
4203 {
4204 #if defined(CONFIG_USER_ONLY)
4205     GEN_PRIV(ctx);
4206 #else
4207     TCGv_i32 t;
4208 
4209     CHK_HV(ctx);
4210     t = tcg_const_i32(PPC_PM_STOP);
4211     gen_helper_pminsn(cpu_env, t);
4212     tcg_temp_free_i32(t);
4213     /* Stop translation, as the CPU is supposed to sleep from now */
4214     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4215 #endif /* defined(CONFIG_USER_ONLY) */
4216 }
4217 
4218 static void gen_sleep(DisasContext *ctx)
4219 {
4220 #if defined(CONFIG_USER_ONLY)
4221     GEN_PRIV(ctx);
4222 #else
4223     TCGv_i32 t;
4224 
4225     CHK_HV(ctx);
4226     t = tcg_const_i32(PPC_PM_SLEEP);
4227     gen_helper_pminsn(cpu_env, t);
4228     tcg_temp_free_i32(t);
4229     /* Stop translation, as the CPU is supposed to sleep from now */
4230     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4231 #endif /* defined(CONFIG_USER_ONLY) */
4232 }
4233 
4234 static void gen_rvwinkle(DisasContext *ctx)
4235 {
4236 #if defined(CONFIG_USER_ONLY)
4237     GEN_PRIV(ctx);
4238 #else
4239     TCGv_i32 t;
4240 
4241     CHK_HV(ctx);
4242     t = tcg_const_i32(PPC_PM_RVWINKLE);
4243     gen_helper_pminsn(cpu_env, t);
4244     tcg_temp_free_i32(t);
4245     /* Stop translation, as the CPU is supposed to sleep from now */
4246     gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
4247 #endif /* defined(CONFIG_USER_ONLY) */
4248 }
4249 #endif /* #if defined(TARGET_PPC64) */
4250 
/* Record nip in the Come-From Address Register, if this CPU has one. */
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->has_cfar) {
        tcg_gen_movi_tl(cpu_cfar, nip);
    }
#endif
}
4259 
#if defined(TARGET_PPC64)
/* Fold the instructions of the ending TB into the PMU instruction counters. */
static void pmu_count_insns(DisasContext *ctx)
{
    /*
     * Do not bother calling the helper if the PMU isn't counting
     * instructions.
     */
    if (!ctx->pmu_insn_cnt) {
        return;
    }

 #if !defined(CONFIG_USER_ONLY)
    /*
     * The PMU insns_inc() helper stops the internal PMU timer if a
     * counter overflows happens. In that case, if the guest is
     * running with icount and we do not handle it beforehand,
     * the helper can trigger a 'bad icount read'.
     */
    gen_icount_io_start(ctx);

    gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
#else
    /*
     * User mode can read (but not write) PMC5 and start/stop
     * the PMU via MMCR0_FC. In this case just increment
     * PMC5 with base.num_insns.
     */
    TCGv t0 = tcg_temp_new();

    gen_load_spr(t0, SPR_POWER_PMC5);
    tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
    gen_store_spr(SPR_POWER_PMC5, t0);

    tcg_temp_free(t0);
#endif /* #if !defined(CONFIG_USER_ONLY) */
}
#else
/* The PMU is a 64-bit book3s feature; nothing to count elsewhere. */
static void pmu_count_insns(DisasContext *ctx)
{
    return;
}
#endif /* #if defined(TARGET_PPC64) */
4302 
/* May this TB chain directly to dest? Defers to the common translator check. */
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}
4307 
4308 static void gen_lookup_and_goto_ptr(DisasContext *ctx)
4309 {
4310     if (unlikely(ctx->singlestep_enabled)) {
4311         gen_debug_exception(ctx);
4312     } else {
4313         /*
4314          * tcg_gen_lookup_and_goto_ptr will exit the TB if
4315          * CF_NO_GOTO_PTR is set. Count insns now.
4316          */
4317         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
4318             pmu_count_insns(ctx);
4319         }
4320 
4321         tcg_gen_lookup_and_goto_ptr();
4322     }
4323 }
4324 
4325 /***                                Branch                                 ***/
/*
 * End the TB with a branch to dest: chain directly when allowed,
 * otherwise fall back to the indirect lookup-and-jump path.
 */
static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    /* In 32-bit mode the branch target wraps to 32 bits. */
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if (use_goto_tb(ctx, dest)) {
        pmu_count_insns(ctx);
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        gen_lookup_and_goto_ptr(ctx);
    }
}
4341 
/* Set the link register to nip (truncated to 32 bits in narrow mode). */
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_lr, nip);
}
4349 
/* b ba bl bla: unconditional branch, relative (AA=0) or absolute (AA=1). */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        target = ctx->cia + li;
    } else {
        target = li;
    }
    /* LK=1: record the return address in LR. */
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    gen_update_cfar(ctx, ctx->cia);
    gen_goto_tb(ctx, 0, target);
    ctx->base.is_jmp = DISAS_NORETURN;
}
4370 
/* Branch-target kinds for gen_bcond(). */
#define BCOND_IM  0   /* immediate displacement (bc)    */
#define BCOND_LR  1   /* link register (bclr)           */
#define BCOND_CTR 2   /* count register (bcctr)         */
#define BCOND_TAR 3   /* target address register (bctar) */
4375 
/*
 * Common body of the conditional branches (bc/bclr/bcctr/bctar).  The
 * BO field selects CTR decrement/test and CR-bit test; "type" selects
 * where the target address comes from (see BCOND_*).
 */
static void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    TCGLabel *l1;
    TCGv target;

    /* Snapshot the register-based target before LR is possibly updated. */
    if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
        target = tcg_temp_local_new();
        if (type == BCOND_CTR) {
            tcg_gen_mov_tl(target, cpu_ctr);
        } else if (type == BCOND_TAR) {
            gen_load_spr(target, SPR_TAR);
        } else {
            tcg_gen_mov_tl(target, cpu_lr);
        }
    } else {
        target = NULL;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    /* l1 is the "branch not taken" (fallthrough) label. */
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();

        if (type == BCOND_CTR) {
            /*
             * All ISAs up to v3 describe this form of bcctr as invalid but
             * some processors, ie. 64-bit server processors compliant with
             * arch 2.x, do implement a "test and decrement" logic instead,
             * as described in their respective UMs. This logic involves CTR
             * to act as both the branch target and a counter, which makes
             * it basically useless and thus never used in real code.
             *
             * This form was hence chosen to trigger extra micro-architectural
             * side-effect on real HW needed for the Spectre v2 workaround.
             * It is up to guests that implement such workaround, ie. linux, to
             * use this form in a way it just triggers the side-effect without
             * doing anything else harmful.
             */
            if (unlikely(!is_book3s_arch2x(ctx))) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                tcg_temp_free(temp);
                tcg_temp_free(target);
                return;
            }

            /* Test first, then decrement ("test and decrement"). */
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
        } else {
            /* Normal form: decrement CTR, then test it. */
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 0x08 >> (bi & 0x03);
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            /* Branch if the selected CR bit is set. */
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            /* Branch if the selected CR bit is clear. */
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    gen_update_cfar(ctx, ctx->cia);
    if (type == BCOND_IM) {
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->cia + li);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
    } else {
        /* Register target: low two address bits are always discarded. */
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        gen_lookup_and_goto_ptr(ctx);
        tcg_temp_free(target);
    }
    if ((bo & 0x14) != 0x14) {
        /* fallthrough case */
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->base.pc_next);
    }
    ctx->base.is_jmp = DISAS_NORETURN;
}
4489 
static void gen_bc(DisasContext *ctx)
{
    /* bc: branch conditional, target from immediate displacement */
    gen_bcond(ctx, BCOND_IM);
}

static void gen_bcctr(DisasContext *ctx)
{
    /* bcctr: branch conditional, target from CTR */
    gen_bcond(ctx, BCOND_CTR);
}

static void gen_bclr(DisasContext *ctx)
{
    /* bclr: branch conditional, target from LR */
    gen_bcond(ctx, BCOND_LR);
}

static void gen_bctar(DisasContext *ctx)
{
    /* bctar: branch conditional, target from TAR */
    gen_bcond(ctx, BCOND_TAR);
}
4509 
4510 /***                      Condition register logical                       ***/
/*
 * Generate a CR-bit logical operation: crbD <- crbA <tcg_op> crbB.
 * Each CR bit lives inside one of the eight 4-bit cpu_crf fields, so
 * both source bits are first shifted to line up with the destination
 * bit position, combined with tcg_op, masked down to the single target
 * bit, and merged into the destination CR field.  The opc argument is
 * not used by the macro body; it only documents each minor opcode.
 */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}
4541 
/* Instantiate the eight condition-register logical instructions */
/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
4558 
4559 /* mcrf */
static void gen_mcrf(DisasContext *ctx)
{
    /* Copy the 4-bit CR field crfS into CR field crfD */
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}
4564 
4565 /***                           System linkage                              ***/
4566 
4567 /* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /*
     * This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x
     */
    if (is_book3s_arch2x(ctx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* Restore CPU state */
    CHK_SV(ctx);
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfi(cpu_env);
    /* Stop translation: machine state (MSR) may have changed */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4589 
4590 #if defined(TARGET_PPC64)
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Restore CPU state; supervisor privileged (64-bit rfi variant) */
    CHK_SV(ctx);
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfid(cpu_env);
    /* Stop translation: machine state (MSR) may have changed */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4604 
4605 #if !defined(CONFIG_USER_ONLY)
4606 static void gen_rfscv(DisasContext *ctx)
4607 {
4608 #if defined(CONFIG_USER_ONLY)
4609     GEN_PRIV(ctx);
4610 #else
4611     /* Restore CPU state */
4612     CHK_SV(ctx);
4613     gen_icount_io_start(ctx);
4614     gen_update_cfar(ctx, ctx->cia);
4615     gen_helper_rfscv(cpu_env);
4616     ctx->base.is_jmp = DISAS_EXIT;
4617 #endif
4618 }
4619 #endif
4620 
static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Restore CPU state; hypervisor privileged */
    CHK_HV(ctx);
    gen_helper_hrfid(cpu_env);
    /* Stop translation: machine state (MSR) may have changed */
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
4632 #endif
4633 
4634 /* sc */
4635 #if defined(CONFIG_USER_ONLY)
4636 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
4637 #else
4638 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
4639 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
4640 #endif
4641 static void gen_sc(DisasContext *ctx)
4642 {
4643     uint32_t lev;
4644 
4645     lev = (ctx->opcode >> 5) & 0x7F;
4646     gen_exception_err(ctx, POWERPC_SYSCALL, lev);
4647 }
4648 
4649 #if defined(TARGET_PPC64)
4650 #if !defined(CONFIG_USER_ONLY)
static void gen_scv(DisasContext *ctx)
{
    /* lev is the 7-bit LEV field from instruction bits [11:5] */
    uint32_t lev = (ctx->opcode >> 5) & 0x7F;

    /* Set the PC back to the faulting instruction. */
    gen_update_nip(ctx, ctx->cia);
    gen_helper_scv(cpu_env, tcg_constant_i32(lev));

    ctx->base.is_jmp = DISAS_NORETURN;
}
4661 #endif
4662 #endif
4663 
4664 /***                                Trap                                   ***/
4665 
4666 /* Check for unconditional traps (always or never) */
4667 static bool check_unconditional_trap(DisasContext *ctx)
4668 {
4669     /* Trap never */
4670     if (TO(ctx->opcode) == 0) {
4671         return true;
4672     }
4673     /* Trap always */
4674     if (TO(ctx->opcode) == 31) {
4675         gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
4676         return true;
4677     }
4678     return false;
4679 }
4680 
4681 /* tw */
4682 static void gen_tw(DisasContext *ctx)
4683 {
4684     TCGv_i32 t0;
4685 
4686     if (check_unconditional_trap(ctx)) {
4687         return;
4688     }
4689     t0 = tcg_const_i32(TO(ctx->opcode));
4690     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4691                   t0);
4692     tcg_temp_free_i32(t0);
4693 }
4694 
4695 /* twi */
4696 static void gen_twi(DisasContext *ctx)
4697 {
4698     TCGv t0;
4699     TCGv_i32 t1;
4700 
4701     if (check_unconditional_trap(ctx)) {
4702         return;
4703     }
4704     t0 = tcg_const_tl(SIMM(ctx->opcode));
4705     t1 = tcg_const_i32(TO(ctx->opcode));
4706     gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4707     tcg_temp_free(t0);
4708     tcg_temp_free_i32(t1);
4709 }
4710 
4711 #if defined(TARGET_PPC64)
4712 /* td */
4713 static void gen_td(DisasContext *ctx)
4714 {
4715     TCGv_i32 t0;
4716 
4717     if (check_unconditional_trap(ctx)) {
4718         return;
4719     }
4720     t0 = tcg_const_i32(TO(ctx->opcode));
4721     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
4722                   t0);
4723     tcg_temp_free_i32(t0);
4724 }
4725 
4726 /* tdi */
4727 static void gen_tdi(DisasContext *ctx)
4728 {
4729     TCGv t0;
4730     TCGv_i32 t1;
4731 
4732     if (check_unconditional_trap(ctx)) {
4733         return;
4734     }
4735     t0 = tcg_const_tl(SIMM(ctx->opcode));
4736     t1 = tcg_const_i32(TO(ctx->opcode));
4737     gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
4738     tcg_temp_free(t0);
4739     tcg_temp_free_i32(t1);
4740 }
4741 #endif
4742 
4743 /***                          Processor control                            ***/
4744 
4745 /* mcrxr */
static void gen_mcrxr(DisasContext *ctx)
{
    /*
     * mcrxr: gather XER[SO, OV, CA] into CR field crfD (bits 3, 2, 1;
     * bit 0 ends up clear), then clear SO, OV and CA in XER.
     */
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(t0, cpu_so);
    tcg_gen_trunc_tl_i32(t1, cpu_ov);
    tcg_gen_trunc_tl_i32(dst, cpu_ca);
    tcg_gen_shli_i32(t0, t0, 3);   /* SO -> bit 3 */
    tcg_gen_shli_i32(t1, t1, 2);   /* OV -> bit 2 */
    tcg_gen_shli_i32(dst, dst, 1); /* CA -> bit 1 */
    tcg_gen_or_i32(dst, dst, t0);
    tcg_gen_or_i32(dst, dst, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);

    tcg_gen_movi_tl(cpu_so, 0);
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_movi_tl(cpu_ca, 0);
}
4767 
4768 #ifdef TARGET_PPC64
4769 /* mcrxrx */
static void gen_mcrxrx(DisasContext *ctx)
{
    /*
     * mcrxrx: gather XER[OV, OV32, CA, CA32] into CR field crfD as
     * (OV << 3) | (OV32 << 2) | (CA << 1) | CA32.  Unlike mcrxr the
     * XER bits are not cleared afterwards.
     */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];

    /* copy OV and OV32 */
    tcg_gen_shli_tl(t0, cpu_ov, 1);
    tcg_gen_or_tl(t0, t0, cpu_ov32);
    tcg_gen_shli_tl(t0, t0, 2);
    /* copy CA and CA32 */
    tcg_gen_shli_tl(t1, cpu_ca, 1);
    tcg_gen_or_tl(t1, t1, cpu_ca32);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_trunc_tl_i32(dst, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
4788 #endif
4789 
4790 /* mfcr mfocrf */
4791 static void gen_mfcr(DisasContext *ctx)
4792 {
4793     uint32_t crm, crn;
4794 
4795     if (likely(ctx->opcode & 0x00100000)) {
4796         crm = CRM(ctx->opcode);
4797         if (likely(crm && ((crm & (crm - 1)) == 0))) {
4798             crn = ctz32(crm);
4799             tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4800             tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4801                             cpu_gpr[rD(ctx->opcode)], crn * 4);
4802         }
4803     } else {
4804         TCGv_i32 t0 = tcg_temp_new_i32();
4805         tcg_gen_mov_i32(t0, cpu_crf[0]);
4806         tcg_gen_shli_i32(t0, t0, 4);
4807         tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4808         tcg_gen_shli_i32(t0, t0, 4);
4809         tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4810         tcg_gen_shli_i32(t0, t0, 4);
4811         tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4812         tcg_gen_shli_i32(t0, t0, 4);
4813         tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4814         tcg_gen_shli_i32(t0, t0, 4);
4815         tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4816         tcg_gen_shli_i32(t0, t0, 4);
4817         tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4818         tcg_gen_shli_i32(t0, t0, 4);
4819         tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4820         tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4821         tcg_temp_free_i32(t0);
4822     }
4823 }
4824 
4825 /* mfmsr */
static void gen_mfmsr(DisasContext *ctx)
{
    /* Move MSR into rD; supervisor privileged */
    CHK_SV(ctx);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
}
4831 
4832 /* mfspr */
/*
 * Common code for mfspr and mftb: select the SPR read callback that
 * matches the current privilege level (user / hypervisor / supervisor)
 * and invoke it.  When the SPR is inaccessible or undefined, raise the
 * privilege / hv-emulation exception the architecture calls for, or
 * treat the access as a no-op where that is the defined behaviour.
 */
static inline void gen_op_mfspr(DisasContext *ctx)
{
    void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    read_cb = ctx->spr_cb[sprn].uea_read;
#else
    if (ctx->pr) {
        read_cb = ctx->spr_cb[sprn].uea_read;
    } else if (ctx->hv) {
        read_cb = ctx->spr_cb[sprn].hea_read;
    } else {
        read_cb = ctx->spr_cb[sprn].oea_read;
    }
#endif
    if (likely(read_cb != NULL)) {
        if (likely(read_cb != SPR_NOACCESS)) {
            (*read_cb)(ctx, rD(ctx->opcode), sprn);
        } else {
            /* Privilege exception */
            /*
             * This is a hack to avoid warnings when running Linux:
             * this OS breaks the PowerPC virtualisation model,
             * allowing userland application to read the PVR
             */
            if (sprn != SPR_PVR) {
                qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
                              "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                              ctx->cia);
            }
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }
        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to read invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);

        /*
         * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
         * generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        } else {
            if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        }
    }
}
4893 
static void gen_mfspr(DisasContext *ctx)
{
    /* mfspr: handled entirely by the common SPR-read path */
    gen_op_mfspr(ctx);
}

/* mftb */
static void gen_mftb(DisasContext *ctx)
{
    /* mftb is routed through the same SPR-read path as mfspr */
    gen_op_mfspr(ctx);
}
4904 
4905 /* mtcrf mtocrf*/
4906 static void gen_mtcrf(DisasContext *ctx)
4907 {
4908     uint32_t crm, crn;
4909 
4910     crm = CRM(ctx->opcode);
4911     if (likely((ctx->opcode & 0x00100000))) {
4912         if (crm && ((crm & (crm - 1)) == 0)) {
4913             TCGv_i32 temp = tcg_temp_new_i32();
4914             crn = ctz32(crm);
4915             tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4916             tcg_gen_shri_i32(temp, temp, crn * 4);
4917             tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4918             tcg_temp_free_i32(temp);
4919         }
4920     } else {
4921         TCGv_i32 temp = tcg_temp_new_i32();
4922         tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4923         for (crn = 0 ; crn < 8 ; crn++) {
4924             if (crm & (1 << crn)) {
4925                     tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4926                     tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4927             }
4928         }
4929         tcg_temp_free_i32(temp);
4930     }
4931 }
4932 
4933 /* mtmsr */
4934 #if defined(TARGET_PPC64)
static void gen_mtmsrd(DisasContext *ctx)
{
    /* mtmsrd: move rS into the MSR (64-bit); Book3S arch 2.x only */
    if (unlikely(!is_book3s_arch2x(ctx))) {
        gen_invalid(ctx);
        return;
    }

    CHK_SV(ctx);

#if !defined(CONFIG_USER_ONLY)
    TCGv t0, t1;
    target_ulong mask;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_icount_io_start(ctx);

    if (ctx->opcode & 0x00010000) {
        /* L=1 form only updates EE and RI */
        mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
    } else {
        /* mtmsrd does not alter HV, S, ME, or LE */
        mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
                 (1ULL << MSR_HV));
        /*
         * XXX: we need to update nip before the store if we enter
         *      power saving mode, we will exit the loop directly from
         *      ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
    }

    /* new_msr = (rS & mask) | (old_msr & ~mask) */
    tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_andi_tl(t1, cpu_msr, ~mask);
    tcg_gen_or_tl(t0, t0, t1);

    gen_helper_store_msr(cpu_env, t0);

    /* Must stop the translation as machine state (may have) changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;

    tcg_temp_free(t0);
    tcg_temp_free(t1);
#endif /* !defined(CONFIG_USER_ONLY) */
}
4981 #endif /* defined(TARGET_PPC64) */
4982 
static void gen_mtmsr(DisasContext *ctx)
{
    /* mtmsr: move the low 32 bits of rS into the MSR */
    CHK_SV(ctx);

#if !defined(CONFIG_USER_ONLY)
    TCGv t0, t1;
    target_ulong mask = 0xFFFFFFFF;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_icount_io_start(ctx);
    if (ctx->opcode & 0x00010000) {
        /* L=1 form only updates EE and RI */
        mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
    } else {
        /* mtmsr does not alter S, ME, or LE */
        mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));

        /*
         * XXX: we need to update nip before the store if we enter
         *      power saving mode, we will exit the loop directly from
         *      ppc_store_msr
         */
        gen_update_nip(ctx, ctx->base.pc_next);
    }

    /* new_msr = (rS & mask) | (old_msr & ~mask) */
    tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_andi_tl(t1, cpu_msr, ~mask);
    tcg_gen_or_tl(t0, t0, t1);

    gen_helper_store_msr(cpu_env, t0);

    /* Must stop the translation as machine state (may have) changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;

    tcg_temp_free(t0);
    tcg_temp_free(t1);
#endif
}
5023 
5024 /* mtspr */
/*
 * mtspr: select the SPR write callback that matches the current
 * privilege level (user / hypervisor / supervisor) and invoke it.
 * Inaccessible or undefined SPRs raise the privilege / hv-emulation
 * exception the architecture calls for, or are treated as no-ops
 * where that is the defined behaviour.
 */
static void gen_mtspr(DisasContext *ctx)
{
    void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
    uint32_t sprn = SPR(ctx->opcode);

#if defined(CONFIG_USER_ONLY)
    write_cb = ctx->spr_cb[sprn].uea_write;
#else
    if (ctx->pr) {
        write_cb = ctx->spr_cb[sprn].uea_write;
    } else if (ctx->hv) {
        write_cb = ctx->spr_cb[sprn].hea_write;
    } else {
        write_cb = ctx->spr_cb[sprn].oea_write;
    }
#endif
    if (likely(write_cb != NULL)) {
        if (likely(write_cb != SPR_NOACCESS)) {
            (*write_cb)(ctx, sprn, rS(ctx->opcode));
        } else {
            /* Privilege exception */
            qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
                          "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
                          ctx->cia);
            gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
        }
    } else {
        /* ISA 2.07 defines these as no-ops */
        if ((ctx->insns_flags2 & PPC2_ISA207S) &&
            (sprn >= 808 && sprn <= 811)) {
            /* This is a nop */
            return;
        }

        /* Not defined */
        qemu_log_mask(LOG_GUEST_ERROR,
                      "Trying to write invalid spr %d (0x%03x) at "
                      TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);


        /*
         * The behaviour depends on MSR:PR and SPR# bit 0x10, it can
         * generate a priv, a hv emu or a no-op
         */
        if (sprn & 0x10) {
            if (ctx->pr) {
                gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        } else {
            if (ctx->pr || sprn == 0) {
                gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
            }
        }
    }
}
5080 
5081 #if defined(TARGET_PPC64)
5082 /* setb */
static void gen_setb(DisasContext *ctx)
{
    /*
     * setb: rD = -1 if CR field crfS has LT set, 1 if GT is set,
     * otherwise 0.  The 4-bit CR field is (LT, GT, EQ, SO) from MSB
     * to LSB, so field >= 8 means LT is set and 4 <= field < 8 means
     * GT is set with LT clear.
     */
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t8 = tcg_constant_i32(8);
    TCGv_i32 tm1 = tcg_constant_i32(-1);
    int crf = crfS(ctx->opcode);

    /* t0 = (field >= 4), i.e. 1 when LT or GT is set */
    tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
    /* override with -1 when field >= 8, i.e. LT is set */
    tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
    tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);

    tcg_temp_free_i32(t0);
}
5096 #endif
5097 
5098 /***                         Cache management                              ***/
5099 
5100 /* dcbf */
5101 static void gen_dcbf(DisasContext *ctx)
5102 {
5103     /* XXX: specification says this is treated as a load by the MMU */
5104     TCGv t0;
5105     gen_set_access_type(ctx, ACCESS_CACHE);
5106     t0 = tcg_temp_new();
5107     gen_addr_reg_index(ctx, t0);
5108     gen_qemu_ld8u(ctx, t0, t0);
5109     tcg_temp_free(t0);
5110 }
5111 
5112 /* dcbfep (external PID dcbf) */
5113 static void gen_dcbfep(DisasContext *ctx)
5114 {
5115     /* XXX: specification says this is treated as a load by the MMU */
5116     TCGv t0;
5117     CHK_SV(ctx);
5118     gen_set_access_type(ctx, ACCESS_CACHE);
5119     t0 = tcg_temp_new();
5120     gen_addr_reg_index(ctx, t0);
5121     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5122     tcg_temp_free(t0);
5123 }
5124 
5125 /* dcbi (Supervisor only) */
static void gen_dcbi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Data cache block invalidate; supervisor privileged */
    TCGv EA, val;

    CHK_SV(ctx);
    EA = tcg_temp_new();
    gen_set_access_type(ctx, ACCESS_CACHE);
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* XXX: specification says this should be treated as a store by the MMU */
    gen_qemu_ld8u(ctx, val, EA);
    gen_qemu_st8(ctx, val, EA);
    tcg_temp_free(val);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}
5145 
5146 /* dcdst */
5147 static void gen_dcbst(DisasContext *ctx)
5148 {
5149     /* XXX: specification say this is treated as a load by the MMU */
5150     TCGv t0;
5151     gen_set_access_type(ctx, ACCESS_CACHE);
5152     t0 = tcg_temp_new();
5153     gen_addr_reg_index(ctx, t0);
5154     gen_qemu_ld8u(ctx, t0, t0);
5155     tcg_temp_free(t0);
5156 }
5157 
5158 /* dcbstep (dcbstep External PID version) */
5159 static void gen_dcbstep(DisasContext *ctx)
5160 {
5161     /* XXX: specification say this is treated as a load by the MMU */
5162     TCGv t0;
5163     gen_set_access_type(ctx, ACCESS_CACHE);
5164     t0 = tcg_temp_new();
5165     gen_addr_reg_index(ctx, t0);
5166     tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
5167     tcg_temp_free(t0);
5168 }
5169 
5170 /* dcbt */
static void gen_dcbt(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* dcbtep */
static void gen_dcbtep(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* dcbtstep */
static void gen_dcbtstep(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5209 
5210 /* dcbtls */
5211 static void gen_dcbtls(DisasContext *ctx)
5212 {
5213     /* Always fails locking the cache */
5214     TCGv t0 = tcg_temp_new();
5215     gen_load_spr(t0, SPR_Exxx_L1CSR0);
5216     tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
5217     gen_store_spr(SPR_Exxx_L1CSR0, t0);
5218     tcg_temp_free(t0);
5219 }
5220 
5221 /* dcbz */
5222 static void gen_dcbz(DisasContext *ctx)
5223 {
5224     TCGv tcgv_addr;
5225     TCGv_i32 tcgv_op;
5226 
5227     gen_set_access_type(ctx, ACCESS_CACHE);
5228     tcgv_addr = tcg_temp_new();
5229     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5230     gen_addr_reg_index(ctx, tcgv_addr);
5231     gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
5232     tcg_temp_free(tcgv_addr);
5233     tcg_temp_free_i32(tcgv_op);
5234 }
5235 
5236 /* dcbzep */
5237 static void gen_dcbzep(DisasContext *ctx)
5238 {
5239     TCGv tcgv_addr;
5240     TCGv_i32 tcgv_op;
5241 
5242     gen_set_access_type(ctx, ACCESS_CACHE);
5243     tcgv_addr = tcg_temp_new();
5244     tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
5245     gen_addr_reg_index(ctx, tcgv_addr);
5246     gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
5247     tcg_temp_free(tcgv_addr);
5248     tcg_temp_free_i32(tcgv_op);
5249 }
5250 
5251 /* dst / dstt */
5252 static void gen_dst(DisasContext *ctx)
5253 {
5254     if (rA(ctx->opcode) == 0) {
5255         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5256     } else {
5257         /* interpreted as no-op */
5258     }
5259 }
5260 
5261 /* dstst /dststt */
5262 static void gen_dstst(DisasContext *ctx)
5263 {
5264     if (rA(ctx->opcode) == 0) {
5265         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5266     } else {
5267         /* interpreted as no-op */
5268     }
5269 
5270 }
5271 
5272 /* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* Data stream stop: no streams are emulated, so nothing to do */
    /* interpreted as no-op */
}
5277 
5278 /* icbi */
5279 static void gen_icbi(DisasContext *ctx)
5280 {
5281     TCGv t0;
5282     gen_set_access_type(ctx, ACCESS_CACHE);
5283     t0 = tcg_temp_new();
5284     gen_addr_reg_index(ctx, t0);
5285     gen_helper_icbi(cpu_env, t0);
5286     tcg_temp_free(t0);
5287 }
5288 
5289 /* icbiep */
5290 static void gen_icbiep(DisasContext *ctx)
5291 {
5292     TCGv t0;
5293     gen_set_access_type(ctx, ACCESS_CACHE);
5294     t0 = tcg_temp_new();
5295     gen_addr_reg_index(ctx, t0);
5296     gen_helper_icbiep(cpu_env, t0);
5297     tcg_temp_free(t0);
5298 }
5299 
5300 /* Optional: */
5301 /* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a store by the MMU
     *      but does not generate any exception
     */
}
5310 
5311 /***                    Segment register manipulation                      ***/
5312 /* Supervisor only: */
5313 
5314 /* mfsr */
5315 static void gen_mfsr(DisasContext *ctx)
5316 {
5317 #if defined(CONFIG_USER_ONLY)
5318     GEN_PRIV(ctx);
5319 #else
5320     TCGv t0;
5321 
5322     CHK_SV(ctx);
5323     t0 = tcg_const_tl(SR(ctx->opcode));
5324     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5325     tcg_temp_free(t0);
5326 #endif /* defined(CONFIG_USER_ONLY) */
5327 }
5328 
5329 /* mfsrin */
5330 static void gen_mfsrin(DisasContext *ctx)
5331 {
5332 #if defined(CONFIG_USER_ONLY)
5333     GEN_PRIV(ctx);
5334 #else
5335     TCGv t0;
5336 
5337     CHK_SV(ctx);
5338     t0 = tcg_temp_new();
5339     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5340     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5341     tcg_temp_free(t0);
5342 #endif /* defined(CONFIG_USER_ONLY) */
5343 }
5344 
5345 /* mtsr */
5346 static void gen_mtsr(DisasContext *ctx)
5347 {
5348 #if defined(CONFIG_USER_ONLY)
5349     GEN_PRIV(ctx);
5350 #else
5351     TCGv t0;
5352 
5353     CHK_SV(ctx);
5354     t0 = tcg_const_tl(SR(ctx->opcode));
5355     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5356     tcg_temp_free(t0);
5357 #endif /* defined(CONFIG_USER_ONLY) */
5358 }
5359 
5360 /* mtsrin */
5361 static void gen_mtsrin(DisasContext *ctx)
5362 {
5363 #if defined(CONFIG_USER_ONLY)
5364     GEN_PRIV(ctx);
5365 #else
5366     TCGv t0;
5367     CHK_SV(ctx);
5368 
5369     t0 = tcg_temp_new();
5370     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5371     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]);
5372     tcg_temp_free(t0);
5373 #endif /* defined(CONFIG_USER_ONLY) */
5374 }
5375 
5376 #if defined(TARGET_PPC64)
5377 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
5378 
5379 /* mfsr */
5380 static void gen_mfsr_64b(DisasContext *ctx)
5381 {
5382 #if defined(CONFIG_USER_ONLY)
5383     GEN_PRIV(ctx);
5384 #else
5385     TCGv t0;
5386 
5387     CHK_SV(ctx);
5388     t0 = tcg_const_tl(SR(ctx->opcode));
5389     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5390     tcg_temp_free(t0);
5391 #endif /* defined(CONFIG_USER_ONLY) */
5392 }
5393 
5394 /* mfsrin */
5395 static void gen_mfsrin_64b(DisasContext *ctx)
5396 {
5397 #if defined(CONFIG_USER_ONLY)
5398     GEN_PRIV(ctx);
5399 #else
5400     TCGv t0;
5401 
5402     CHK_SV(ctx);
5403     t0 = tcg_temp_new();
5404     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5405     gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
5406     tcg_temp_free(t0);
5407 #endif /* defined(CONFIG_USER_ONLY) */
5408 }
5409 
5410 /* mtsr */
5411 static void gen_mtsr_64b(DisasContext *ctx)
5412 {
5413 #if defined(CONFIG_USER_ONLY)
5414     GEN_PRIV(ctx);
5415 #else
5416     TCGv t0;
5417 
5418     CHK_SV(ctx);
5419     t0 = tcg_const_tl(SR(ctx->opcode));
5420     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5421     tcg_temp_free(t0);
5422 #endif /* defined(CONFIG_USER_ONLY) */
5423 }
5424 
5425 /* mtsrin */
5426 static void gen_mtsrin_64b(DisasContext *ctx)
5427 {
5428 #if defined(CONFIG_USER_ONLY)
5429     GEN_PRIV(ctx);
5430 #else
5431     TCGv t0;
5432 
5433     CHK_SV(ctx);
5434     t0 = tcg_temp_new();
5435     tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
5436     gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]);
5437     tcg_temp_free(t0);
5438 #endif /* defined(CONFIG_USER_ONLY) */
5439 }
5440 
5441 #endif /* defined(TARGET_PPC64) */
5442 
5443 /***                      Lookaside buffer management                      ***/
5444 /* Optional & supervisor only: */
5445 
5446 /* tlbia */
static void gen_tlbia(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    /* Invalidate all TLB entries; hypervisor privileged */
    CHK_HV(ctx);

    gen_helper_tlbia(cpu_env);
#endif  /* defined(CONFIG_USER_ONLY) */
}
5457 
/* tlbsync: synchronize outstanding TLB invalidations */
static void gen_tlbsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else

    if (ctx->gtse) {
        CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
    } else {
        CHK_HV(ctx); /* Else hypervisor privileged */
    }

    /*
     * On BookS both ptesync and tlbsync trigger the flush check, which
     * makes tlbsync itself a nop for server CPUs; only BookE needs the
     * explicit TLB flush check here.
     */
    if (ctx->insns_flags & PPC_BOOKE) {
        gen_check_tlb_flush(ctx, true);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
5477 
5478 /***                              External control                         ***/
5479 /* Optional: */
5480 
5481 /* eciwx */
5482 static void gen_eciwx(DisasContext *ctx)
5483 {
5484     TCGv t0;
5485     /* Should check EAR[E] ! */
5486     gen_set_access_type(ctx, ACCESS_EXT);
5487     t0 = tcg_temp_new();
5488     gen_addr_reg_index(ctx, t0);
5489     tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5490                        DEF_MEMOP(MO_UL | MO_ALIGN));
5491     tcg_temp_free(t0);
5492 }
5493 
5494 /* ecowx */
5495 static void gen_ecowx(DisasContext *ctx)
5496 {
5497     TCGv t0;
5498     /* Should check EAR[E] ! */
5499     gen_set_access_type(ctx, ACCESS_EXT);
5500     t0 = tcg_temp_new();
5501     gen_addr_reg_index(ctx, t0);
5502     tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
5503                        DEF_MEMOP(MO_UL | MO_ALIGN));
5504     tcg_temp_free(t0);
5505 }
5506 
5507 /* 602 - 603 - G2 TLB management */
5508 
/* tlbld: load a data TLB entry from rB via the 6xx helper */
static void gen_tlbld_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
5519 
/* tlbli: load an instruction TLB entry from rB via the 6xx helper */
static void gen_tlbli_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
5530 
5531 /* BookE specific instructions */
5532 
/* XXX: not implemented on 440 ? */
static void gen_mfapidi(DisasContext *ctx)
{
    /* XXX: TODO -- unimplemented, always raises an invalid-insn exception */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}
5539 
5540 /* XXX: not implemented on 440 ? */
5541 static void gen_tlbiva(DisasContext *ctx)
5542 {
5543 #if defined(CONFIG_USER_ONLY)
5544     GEN_PRIV(ctx);
5545 #else
5546     TCGv t0;
5547 
5548     CHK_SV(ctx);
5549     t0 = tcg_temp_new();
5550     gen_addr_reg_index(ctx, t0);
5551     gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
5552     tcg_temp_free(t0);
5553 #endif /* defined(CONFIG_USER_ONLY) */
5554 }
5555 
5556 /* All 405 MAC instructions are translated here */
5557 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
5558                                         int ra, int rb, int rt, int Rc)
5559 {
5560     TCGv t0, t1;
5561 
5562     t0 = tcg_temp_local_new();
5563     t1 = tcg_temp_local_new();
5564 
5565     switch (opc3 & 0x0D) {
5566     case 0x05:
5567         /* macchw    - macchw.    - macchwo   - macchwo.   */
5568         /* macchws   - macchws.   - macchwso  - macchwso.  */
5569         /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
5570         /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
5571         /* mulchw - mulchw. */
5572         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5573         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5574         tcg_gen_ext16s_tl(t1, t1);
5575         break;
5576     case 0x04:
5577         /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
5578         /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
5579         /* mulchwu - mulchwu. */
5580         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5581         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5582         tcg_gen_ext16u_tl(t1, t1);
5583         break;
5584     case 0x01:
5585         /* machhw    - machhw.    - machhwo   - machhwo.   */
5586         /* machhws   - machhws.   - machhwso  - machhwso.  */
5587         /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
5588         /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
5589         /* mulhhw - mulhhw. */
5590         tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
5591         tcg_gen_ext16s_tl(t0, t0);
5592         tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
5593         tcg_gen_ext16s_tl(t1, t1);
5594         break;
5595     case 0x00:
5596         /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
5597         /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
5598         /* mulhhwu - mulhhwu. */
5599         tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
5600         tcg_gen_ext16u_tl(t0, t0);
5601         tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
5602         tcg_gen_ext16u_tl(t1, t1);
5603         break;
5604     case 0x0D:
5605         /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
5606         /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
5607         /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
5608         /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
5609         /* mullhw - mullhw. */
5610         tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
5611         tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
5612         break;
5613     case 0x0C:
5614         /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
5615         /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
5616         /* mullhwu - mullhwu. */
5617         tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
5618         tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
5619         break;
5620     }
5621     if (opc2 & 0x04) {
5622         /* (n)multiply-and-accumulate (0x0C / 0x0E) */
5623         tcg_gen_mul_tl(t1, t0, t1);
5624         if (opc2 & 0x02) {
5625             /* nmultiply-and-accumulate (0x0E) */
5626             tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
5627         } else {
5628             /* multiply-and-accumulate (0x0C) */
5629             tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
5630         }
5631 
5632         if (opc3 & 0x12) {
5633             /* Check overflow and/or saturate */
5634             TCGLabel *l1 = gen_new_label();
5635 
5636             if (opc3 & 0x10) {
5637                 /* Start with XER OV disabled, the most likely case */
5638                 tcg_gen_movi_tl(cpu_ov, 0);
5639             }
5640             if (opc3 & 0x01) {
5641                 /* Signed */
5642                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
5643                 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
5644                 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
5645                 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
5646                 if (opc3 & 0x02) {
5647                     /* Saturate */
5648                     tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
5649                     tcg_gen_xori_tl(t0, t0, 0x7fffffff);
5650                 }
5651             } else {
5652                 /* Unsigned */
5653                 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
5654                 if (opc3 & 0x02) {
5655                     /* Saturate */
5656                     tcg_gen_movi_tl(t0, UINT32_MAX);
5657                 }
5658             }
5659             if (opc3 & 0x10) {
5660                 /* Check overflow */
5661                 tcg_gen_movi_tl(cpu_ov, 1);
5662                 tcg_gen_movi_tl(cpu_so, 1);
5663             }
5664             gen_set_label(l1);
5665             tcg_gen_mov_tl(cpu_gpr[rt], t0);
5666         }
5667     } else {
5668         tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
5669     }
5670     tcg_temp_free(t0);
5671     tcg_temp_free(t1);
5672     if (unlikely(Rc) != 0) {
5673         /* Update Rc0 */
5674         gen_set_Rc0(ctx, cpu_gpr[rt]);
5675     }
5676 }
5677 
/*
 * Emit one 405 MAC-family instruction: the opc2/opc3 encoding constants
 * are forwarded to gen_405_mulladd_insn(), which selects the operand
 * halves, signedness, and saturate/overflow behaviour from them.
 */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw    - macchw.    */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo   - macchwo.   */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws   - macchws.   */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso  - macchwso.  */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu  - macchwsu.  */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu   - macchwu.   */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo  - macchwuo.  */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw    - machhw.    */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo   - machhwo.   */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws   - machhws.   */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso  - machhwso.  */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu  - machhwsu.  */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu   - machhwu.   */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo  - machhwuo.  */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw    - maclhw.    */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo   - maclhwo.   */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws   - maclhws.   */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso  - maclhwso.  */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu   - maclhwu.   */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo  - maclhwuo.  */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu  - maclhwsu.  */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* nmacchw   - nmacchw.   */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo  - nmacchwo.  */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws  - nmacchws.  */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw   - nmachhw.   */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo  - nmachhwo.  */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws  - nmachhws.  */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw   - nmaclhw.   */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo  - nmaclhwo.  */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws  - nmaclhws.  */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* mulchw  - mulchw.  */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw  - mulhhw.  */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw  - mullhw.  */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu. */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
5770 
5771 /* mfdcr */
5772 static void gen_mfdcr(DisasContext *ctx)
5773 {
5774 #if defined(CONFIG_USER_ONLY)
5775     GEN_PRIV(ctx);
5776 #else
5777     TCGv dcrn;
5778 
5779     CHK_SV(ctx);
5780     dcrn = tcg_const_tl(SPR(ctx->opcode));
5781     gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
5782     tcg_temp_free(dcrn);
5783 #endif /* defined(CONFIG_USER_ONLY) */
5784 }
5785 
5786 /* mtdcr */
5787 static void gen_mtdcr(DisasContext *ctx)
5788 {
5789 #if defined(CONFIG_USER_ONLY)
5790     GEN_PRIV(ctx);
5791 #else
5792     TCGv dcrn;
5793 
5794     CHK_SV(ctx);
5795     dcrn = tcg_const_tl(SPR(ctx->opcode));
5796     gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5797     tcg_temp_free(dcrn);
5798 #endif /* defined(CONFIG_USER_ONLY) */
5799 }
5800 
/* mfdcrx: read the DCR whose number is in rA into rD */
/* XXX: not implemented on 440 ? */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}
5814 
/* mtdcrx: write rS to the DCR whose number is in rA */
/* XXX: not implemented on 440 ? */
static void gen_mtdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}
5828 
/* dccci: data cache congruence-class invalidate */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV(ctx);
    /* interpreted as no-op (QEMU does not model the data cache) */
}
5835 
/* dcread: data cache read; rD receives the EA */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv EA, val;

    CHK_SV(ctx);
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /*
     * The loaded value is discarded; the load is presumably performed
     * only for its MMU side effects (faults) -- TODO confirm.
     */
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}
5855 
/* icbt (40x): instruction cache block touch */
static void gen_icbt_40x(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
5865 
/* iccci: instruction cache congruence-class invalidate */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV(ctx);
    /* interpreted as no-op (QEMU does not model the instruction cache) */
}
5872 
/* icread: instruction cache read */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV(ctx);
    /* interpreted as no-op (QEMU does not model the instruction cache) */
}
5879 
/* rfci (supervisor only): 40x return from critical interrupt */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    /* The helper changed the CPU state, so end this TB */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
5892 
/* rfci: return from critical interrupt (supervisor only) */
static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    /* The helper changed the CPU state, so end this TB */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
5904 
5905 /* BookE specific */
5906 
/* rfdi: return from debug interrupt (supervisor only) */
/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    /* The helper changed the CPU state, so end this TB */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
5919 
/* rfmci: return from machine-check interrupt (supervisor only) */
/* XXX: not implemented on 440 ? */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    /* The helper changed the CPU state, so end this TB */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}
5932 
5933 /* TLB management - PowerPC 405 implementation */
5934 
/* tlbre: read TLB entry rA; rB selects the hi (0) or lo (1) word */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        /* Any other word selector is an invalid form */
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
5957 
/* tlbsx - tlbsx.: search TLB for EA; rD receives the entry index */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /* tlbsx.: CR0 = SO, plus EQ (0x02) when an entry was found
         * (the helper returns -1 when no entry matches) */
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
5980 
/* tlbwe: write rS to TLB entry rA; rB selects the hi (0) or lo (1) word */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);

    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        /* Any other word selector is an invalid form */
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
6004 
6005 /* TLB management - PowerPC 440 implementation */
6006 
6007 /* tlbre */
6008 static void gen_tlbre_440(DisasContext *ctx)
6009 {
6010 #if defined(CONFIG_USER_ONLY)
6011     GEN_PRIV(ctx);
6012 #else
6013     CHK_SV(ctx);
6014 
6015     switch (rB(ctx->opcode)) {
6016     case 0:
6017     case 1:
6018     case 2:
6019         {
6020             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6021             gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
6022                                  t0, cpu_gpr[rA(ctx->opcode)]);
6023             tcg_temp_free_i32(t0);
6024         }
6025         break;
6026     default:
6027         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6028         break;
6029     }
6030 #endif /* defined(CONFIG_USER_ONLY) */
6031 }
6032 
/* tlbsx - tlbsx.: search TLB for EA; rD receives the entry index */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        /* tlbsx.: CR0 = SO, plus EQ (0x02) when an entry was found
         * (the helper returns -1 when no entry matches) */
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
6055 
6056 /* tlbwe */
6057 static void gen_tlbwe_440(DisasContext *ctx)
6058 {
6059 #if defined(CONFIG_USER_ONLY)
6060     GEN_PRIV(ctx);
6061 #else
6062     CHK_SV(ctx);
6063     switch (rB(ctx->opcode)) {
6064     case 0:
6065     case 1:
6066     case 2:
6067         {
6068             TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
6069             gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
6070                                  cpu_gpr[rS(ctx->opcode)]);
6071             tcg_temp_free_i32(t0);
6072         }
6073         break;
6074     default:
6075         gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
6076         break;
6077     }
6078 #endif /* defined(CONFIG_USER_ONLY) */
6079 }
6080 
6081 /* TLB management - PowerPC BookE 2.06 implementation */
6082 
6083 /* tlbre */
6084 static void gen_tlbre_booke206(DisasContext *ctx)
6085 {
6086  #if defined(CONFIG_USER_ONLY)
6087     GEN_PRIV(ctx);
6088 #else
6089    CHK_SV(ctx);
6090     gen_helper_booke206_tlbre(cpu_env);
6091 #endif /* defined(CONFIG_USER_ONLY) */
6092 }
6093 
6094 /* tlbsx - tlbsx. */
6095 static void gen_tlbsx_booke206(DisasContext *ctx)
6096 {
6097 #if defined(CONFIG_USER_ONLY)
6098     GEN_PRIV(ctx);
6099 #else
6100     TCGv t0;
6101 
6102     CHK_SV(ctx);
6103     if (rA(ctx->opcode)) {
6104         t0 = tcg_temp_new();
6105         tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
6106     } else {
6107         t0 = tcg_const_tl(0);
6108     }
6109 
6110     tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
6111     gen_helper_booke206_tlbsx(cpu_env, t0);
6112     tcg_temp_free(t0);
6113 #endif /* defined(CONFIG_USER_ONLY) */
6114 }
6115 
/* tlbwe: write the MAS registers into the selected TLB entry */
static void gen_tlbwe_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    gen_helper_booke206_tlbwe(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}
6126 
/* tlbivax: invalidate TLB entries matching EA = (rA|0) + rB */
static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
6141 
/* tlbilx: local TLB invalidate; opcode bits 21:22 select the flavour */
static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    /* Flavour 2 is reserved and raises an invalid-insn exception */
    switch ((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }

    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}
6171 
/* wrtee: copy the EE bit from rD into MSR */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    TCGv t0;

    CHK_SV(ctx);
    t0 = tcg_temp_new();
    /* Isolate the EE bit of rD, clear it in MSR, then merge */
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    /* Setting EE may unmask a pending interrupt */
    gen_ppc_maybe_interrupt(ctx);
    tcg_temp_free(t0);
    /*
     * Stop translation to have a chance to raise an exception if we
     * just set msr_ee to 1
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
#endif /* defined(CONFIG_USER_ONLY) */
}
6194 
/* wrteei: set or clear MSR[EE] from an immediate opcode bit */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV(ctx);
#else
    CHK_SV(ctx);
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Setting EE may unmask a pending interrupt */
        gen_ppc_maybe_interrupt(ctx);
        /* Stop translation to have a chance to raise an exception */
        ctx->base.is_jmp = DISAS_EXIT_UPDATE;
    } else {
        /* Clearing EE cannot unmask anything: no TB exit needed */
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif /* defined(CONFIG_USER_ONLY) */
}
6212 
6213 /* PowerPC 440 specific instructions */
6214 
6215 /* dlmzb */
6216 static void gen_dlmzb(DisasContext *ctx)
6217 {
6218     TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
6219     gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
6220                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
6221     tcg_temp_free_i32(t0);
6222 }
6223 
/* mbar replaces eieio on 440 */
static void gen_mbar(DisasContext *ctx)
{
    /* interpreted as no-op (no reordering to model in QEMU) */
}
6229 
/* msync replaces sync on 440 */
static void gen_msync_4xx(DisasContext *ctx)
{
    /* Only e500 seems to treat reserved bits as invalid */
    if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
        (ctx->opcode & 0x03FFF801)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    }
    /* otherwise interpreted as no-op */
}
6240 
/* icbt (440): instruction cache block touch */
static void gen_icbt_440(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}
6250 
6251 #if defined(TARGET_PPC64)
6252 static void gen_maddld(DisasContext *ctx)
6253 {
6254     TCGv_i64 t1 = tcg_temp_new_i64();
6255 
6256     tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
6257     tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
6258     tcg_temp_free_i64(t1);
6259 }
6260 
/* maddhd maddhdu: rD = high 64 bits of (rA * rB) + rC */
static void gen_maddhd_maddhdu(DisasContext *ctx)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    /* The Rc opcode bit selects the unsigned form (maddhdu) */
    if (Rc(ctx->opcode)) {
        tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_movi_i64(t1, 0);
    } else {
        tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        /* Sign-extend the addend rC for the signed form */
        tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
    }
    /* 128-bit add of (hi:lo) + (t1:rC); rD gets the high half */
    tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
                     cpu_gpr[rC(ctx->opcode)], t1);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(t1);
}
6283 #endif /* defined(TARGET_PPC64) */
6284 
6285 static void gen_tbegin(DisasContext *ctx)
6286 {
6287     if (unlikely(!ctx->tm_enabled)) {
6288         gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
6289         return;
6290     }
6291     gen_helper_tbegin(cpu_env);
6292 }
6293 
/*
 * User-space transactional-memory instructions that become CR0-clearing
 * no-ops, because QEMU's tbegin always fails (see macro body).
 */
#define GEN_TM_NOOP(name)                                      \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails in QEMU, these user         \
     * space instructions all have a simple implementation:    \
     *                                                         \
     *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
     *           = 0b0 || 0b00    || 0b0                       \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

GEN_TM_NOOP(tend);
GEN_TM_NOOP(tabort);
GEN_TM_NOOP(tabortwc);
GEN_TM_NOOP(tabortwci);
GEN_TM_NOOP(tabortdc);
GEN_TM_NOOP(tabortdci);
GEN_TM_NOOP(tsr);
6318 
/* cp_abort: copy-paste abort; nothing to discard since copy/paste
 * are not implemented (see GEN_CP_PASTE_NOOP below) */
static inline void gen_cp_abort(DisasContext *ctx)
{
    /* Do Nothing */
}
6323 
/* copy/paste are not implemented: both raise an invalid-insn exception */
#define GEN_CP_PASTE_NOOP(name)                           \
static inline void gen_##name(DisasContext *ctx)          \
{                                                         \
    /*                                                    \
     * Generate invalid exception until we have an        \
     * implementation of the copy paste facility          \
     */                                                   \
    gen_invalid(ctx);                                     \
}

GEN_CP_PASTE_NOOP(copy)
GEN_CP_PASTE_NOOP(paste)
6336 
/* tcheck: report the (always doomed) transaction state in CR[crfD] */
static void gen_tcheck(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    /*
     * Because tbegin always fails, the tcheck implementation is
     * simple:
     *
     * CR[CRF] = TDOOMED || MSR[TS] || 0b0
     *         = 0b1 || 0b00 || 0b0
     */
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
}
6352 
/*
 * Privileged TM instructions: in user-only mode they raise a privileged
 * opcode exception; in system mode they are CR0-clearing no-ops since
 * tbegin always fails.
 */
#if defined(CONFIG_USER_ONLY)
#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    gen_priv_opc(ctx);                                         \
}

#else

#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    CHK_SV(ctx);                                               \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails, the implementation is      \
     * simple:                                                 \
     *                                                         \
     *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
     *         = 0b0 || 0b00 | 0b0                             \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

#endif

GEN_TM_PRIV_NOOP(treclaim);
GEN_TM_PRIV_NOOP(trechkpt);
6384 
/* Load FPR regno (doubleword 0 of the VSR) into dst */
static inline void get_fpr(TCGv_i64 dst, int regno)
{
    tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
}
6389 
/* Store src into FPR regno, zeroing doubleword 1 of the VSR */
static inline void set_fpr(int regno, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
    /*
     * Before PowerISA v3.1 the result of doubleword 1 of the VSR
     * corresponding to the target FPR was undefined. However,
     * most (if not all) real hardware were setting the result to 0.
     * Starting at ISA v3.1, the result for doubleword 1 is now defined
     * to be 0.
     */
    tcg_gen_st_i64(tcg_constant_i64(0), cpu_env, vsr64_offset(regno, false));
}
6402 
/* Load one 64-bit half (high or low) of Altivec register regno into dst */
static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
}
6407 
/* Store src into one 64-bit half (high or low) of Altivec register regno */
static inline void set_avr64(int regno, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
}
6412 
6413 /*
6414  * Helpers for decodetree used by !function for decoding arguments.
6415  */
/* decodetree !function helper: scale the decoded field by 2 (ctx unused) */
static int times_2(DisasContext *ctx, int x)
{
    return x * 2;
}
6420 
/* decodetree !function helper: scale the decoded field by 4 (ctx unused) */
static int times_4(DisasContext *ctx, int x)
{
    return x * 4;
}
6425 
6426 static int times_16(DisasContext *ctx, int x)
6427 {
6428     return x * 16;
6429 }
6430 
6431 static int64_t dw_compose_ea(DisasContext *ctx, int x)
6432 {
6433     return deposit64(0xfffffffffffffe00, 3, 6, x);
6434 }
6435 
6436 /*
6437  * Helpers for trans_* functions to check for specific insns flags.
6438  * Use token pasting to ensure that we use the proper flag with the
6439  * proper variable.
6440  */
/*
 * Return false (insn not recognized by this decoder) when the CPU model
 * lacks the PPC_<NAME> bit in insns_flags.
 */
#define REQUIRE_INSNS_FLAGS(CTX, NAME) \
    do {                                                \
        if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
            return false;                               \
        }                                               \
    } while (0)

/* As above, but for the second flag word (PPC2_<NAME> in insns_flags2). */
#define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
    do {                                                \
        if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
            return false;                               \
        }                                               \
    } while (0)

/* Then special-case the check for 64-bit so that we elide code for ppc32. */
#if TARGET_LONG_BITS == 32
# define REQUIRE_64BIT(CTX)  return false
#else
# define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
#endif
6461 
/*
 * Facility-availability guards for trans_* functions: if the facility
 * is disabled, generate the matching unavailable-facility exception and
 * return true (insn handled -- as an exception, not as a translation).
 */
#define REQUIRE_VECTOR(CTX)                             \
    do {                                                \
        if (unlikely(!(CTX)->altivec_enabled)) {        \
            gen_exception((CTX), POWERPC_EXCP_VPU);     \
            return true;                                \
        }                                               \
    } while (0)

#define REQUIRE_VSX(CTX)                                \
    do {                                                \
        if (unlikely(!(CTX)->vsx_enabled)) {            \
            gen_exception((CTX), POWERPC_EXCP_VSXU);    \
            return true;                                \
        }                                               \
    } while (0)

#define REQUIRE_FPU(ctx)                                \
    do {                                                \
        if (unlikely(!(ctx)->fpu_enabled)) {            \
            gen_exception((ctx), POWERPC_EXCP_FPU);     \
            return true;                                \
        }                                               \
    } while (0)
6485 
#if !defined(CONFIG_USER_ONLY)
/*
 * Privilege guards: raise a privileged-opcode program exception (and
 * return true, insn handled) unless running in supervisor state, or
 * for REQUIRE_HV, hypervisor state.
 */
#define REQUIRE_SV(CTX)             \
    do {                            \
        if (unlikely((CTX)->pr)) {  \
            gen_priv_opc(CTX);      \
            return true;            \
        }                           \
    } while (0)

#define REQUIRE_HV(CTX)                             \
    do {                                            \
        if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
            gen_priv_opc(CTX);                      \
            return true;                            \
        }                                           \
    } while (0)
#else
/* User-mode emulation is never privileged: always reject. */
#define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
#define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
#endif
6506 
6507 /*
6508  * Helpers for implementing sets of trans_* functions.
6509  * Defer the implementation of NAME to FUNC, with optional extra arguments.
6510  */
/* Forward trans_NAME directly to FUNC with optional extra arguments. */
#define TRANS(NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    { return FUNC(ctx, a, __VA_ARGS__); }
/* As TRANS, but first require the PPC_<FLAGS> bit in insns_flags. */
#define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    {                                                          \
        REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
        return FUNC(ctx, a, __VA_ARGS__);                      \
    }
/* As TRANS, but first require the PPC2_<FLAGS2> bit in insns_flags2. */
#define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    {                                                          \
        REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
        return FUNC(ctx, a, __VA_ARGS__);                      \
    }

/* As TRANS, but first require a 64-bit implementation. */
#define TRANS64(NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
/* As TRANS64, but additionally require a PPC2_<FLAGS2> bit. */
#define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    {                                                          \
        REQUIRE_64BIT(ctx);                                    \
        REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
        return FUNC(ctx, a, __VA_ARGS__);                      \
    }
6537 
6538 /* TODO: More TRANS* helpers for extra insn_flags checks. */
6539 
6540 
6541 #include "decode-insn32.c.inc"
6542 #include "decode-insn64.c.inc"
6543 #include "power8-pmu-regs.c.inc"
6544 
6545 /*
6546  * Incorporate CIA into the constant when R=1.
6547  * Validate that when R=1, RA=0.
6548  */
6549 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
6550 {
6551     d->rt = a->rt;
6552     d->ra = a->ra;
6553     d->si = a->si;
6554     if (a->r) {
6555         if (unlikely(a->ra != 0)) {
6556             gen_invalid(ctx);
6557             return false;
6558         }
6559         d->si += ctx->cia;
6560     }
6561     return true;
6562 }
6563 
6564 #include "translate/fixedpoint-impl.c.inc"
6565 
6566 #include "translate/fp-impl.c.inc"
6567 
6568 #include "translate/vmx-impl.c.inc"
6569 
6570 #include "translate/vsx-impl.c.inc"
6571 
6572 #include "translate/dfp-impl.c.inc"
6573 
6574 #include "translate/spe-impl.c.inc"
6575 
6576 #include "translate/branch-impl.c.inc"
6577 
6578 #include "translate/processor-ctrl-impl.c.inc"
6579 
6580 #include "translate/storage-ctrl-impl.c.inc"
6581 
6582 /* Handles lfdp */
6583 static void gen_dform39(DisasContext *ctx)
6584 {
6585     if ((ctx->opcode & 0x3) == 0) {
6586         if (ctx->insns_flags2 & PPC2_ISA205) {
6587             return gen_lfdp(ctx);
6588         }
6589     }
6590     return gen_invalid(ctx);
6591 }
6592 
6593 /* Handles stfdp */
6594 static void gen_dform3D(DisasContext *ctx)
6595 {
6596     if ((ctx->opcode & 3) == 0) { /* DS-FORM */
6597         /* stfdp */
6598         if (ctx->insns_flags2 & PPC2_ISA205) {
6599             return gen_stfdp(ctx);
6600         }
6601     }
6602     return gen_invalid(ctx);
6603 }
6604 
6605 #if defined(TARGET_PPC64)
6606 /* brd */
static void gen_brd(DisasContext *ctx)
{
    /* brd: byte-reverse the doubleword in rS, result to rA. */
    tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
6611 
6612 /* brw */
6613 static void gen_brw(DisasContext *ctx)
6614 {
6615     tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
6616     tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
6617 
6618 }
6619 
6620 /* brh */
6621 static void gen_brh(DisasContext *ctx)
6622 {
6623     TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
6624     TCGv_i64 t1 = tcg_temp_new_i64();
6625     TCGv_i64 t2 = tcg_temp_new_i64();
6626 
6627     tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
6628     tcg_gen_and_i64(t2, t1, mask);
6629     tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
6630     tcg_gen_shli_i64(t1, t1, 8);
6631     tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
6632 
6633     tcg_temp_free_i64(t1);
6634     tcg_temp_free_i64(t2);
6635 }
6636 #endif
6637 
6638 static opcode_t opcodes[] = {
6639 #if defined(TARGET_PPC64)
6640 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
6641 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
6642 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
6643 #endif
6644 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
6645 #if defined(TARGET_PPC64)
6646 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
6647 #endif
6648 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
6649 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
6650 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
6651 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6652 GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6653 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER),
6654 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER),
6655 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER),
6656 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER),
6657 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6658 #if defined(TARGET_PPC64)
6659 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B),
6660 #endif
6661 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER),
6662 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER),
6663 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6664 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6665 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6666 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER),
6667 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300),
6668 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
6669 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6670 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
6671 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER),
6672 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER),
6673 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6674 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6675 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6676 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6677 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB),
6678 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD),
6679 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205),
6680 #if defined(TARGET_PPC64)
6681 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD),
6682 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B),
6683 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300),
6684 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300),
6685 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205),
6686 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206),
6687 #endif
6688 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6689 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6690 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6691 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
6692 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
6693 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
6694 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
6695 #if defined(TARGET_PPC64)
6696 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
6697 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
6698 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
6699 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
6700 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
6701 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
6702                PPC_NONE, PPC2_ISA300),
6703 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
6704                PPC_NONE, PPC2_ISA300),
6705 #endif
6706 /* handles lfdp, lxsd, lxssp */
6707 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6708 /* handles stfdp, stxsd, stxssp */
6709 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
6710 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6711 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
6712 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
6713 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
6714 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
6715 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
6716 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO),
6717 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
6718 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6719 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6720 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
6721 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
6722 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
6723 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6724 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
6725 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
6726 #if defined(TARGET_PPC64)
6727 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
6728 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
6729 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
6730 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
6731 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
6732 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
6733 #endif
6734 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC),
6735 /* ISA v3.0 changed the extended opcode from 62 to 30 */
6736 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
6737 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
6738 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6739 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6740 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
6741 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
6742 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
6743 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
6744 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
6745 #if defined(TARGET_PPC64)
6746 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
6747 #if !defined(CONFIG_USER_ONLY)
6748 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6749 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6750 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
6751 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
6752 #endif
6753 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
6754 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6755 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6756 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6757 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
6758 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
6759 #endif
6760 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
6761 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
6762 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
6763 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW),
6764 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
6765 #if defined(TARGET_PPC64)
6766 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B),
6767 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B),
6768 #endif
6769 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
6770 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
6771 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
6772 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
6773 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
6774 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
6775 #if defined(TARGET_PPC64)
6776 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
6777 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
6778 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
6779 #endif
6780 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
6781 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
6782 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
6783 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6784 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
6785 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
6786 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6787 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
6788 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6789 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
6790 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
6791 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
6792 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
6793 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
6794 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
6795 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
6796 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
6797 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
6798 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
6799 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
6800 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
6801 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
6802 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
6803 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
6804 #if defined(TARGET_PPC64)
6805 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
6806 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
6807              PPC_SEGMENT_64B),
6808 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
6809 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
6810              PPC_SEGMENT_64B),
6811 #endif
6812 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
6813 /*
6814  * XXX Those instructions will need to be handled differently for
6815  * different ISA versions
6816  */
6817 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
6818 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
6819 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
6820 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
6821 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
6822 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
6823 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
6824 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
6825 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
6826 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
6827 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
6828 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
6829 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
6830 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
6831 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
6832 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
6833 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
6834 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
6835 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
6836 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
6837 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
6838 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
6839 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
6840 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
6841 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
6842 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
6843 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
6844                PPC_NONE, PPC2_BOOKE206),
6845 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
6846                PPC_NONE, PPC2_BOOKE206),
6847 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
6848                PPC_NONE, PPC2_BOOKE206),
6849 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
6850                PPC_NONE, PPC2_BOOKE206),
6851 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
6852                PPC_NONE, PPC2_BOOKE206),
6853 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
6854 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
6855 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
6856 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
6857               PPC_BOOKE, PPC2_BOOKE206),
6858 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
6859 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
6860                PPC_BOOKE, PPC2_BOOKE206),
6861 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
6862              PPC_440_SPEC),
6863 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
6864 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
6865 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
6866 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
6867 #if defined(TARGET_PPC64)
6868 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
6869               PPC2_ISA300),
6870 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
6871 #endif
6872 
6873 #undef GEN_INT_ARITH_ADD
6874 #undef GEN_INT_ARITH_ADD_CONST
6875 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
6876 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
6877 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
6878                                 add_ca, compute_ca, compute_ov)               \
6879 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
6880 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
6881 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
6882 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
6883 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
6884 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
6885 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
6886 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
6887 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
6888 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
6889 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
6890 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
6891 
6892 #undef GEN_INT_ARITH_DIVW
6893 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
6894 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
6895 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
6896 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
6897 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
6898 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
6899 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6900 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6901 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6902 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6903 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6904 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6905 
6906 #if defined(TARGET_PPC64)
6907 #undef GEN_INT_ARITH_DIVD
6908 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
6909 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6910 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
6911 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
6912 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
6913 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),
6914 
6915 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6916 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
6917 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6918 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
6919 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
6920 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),
6921 
6922 #undef GEN_INT_ARITH_MUL_HELPER
6923 #define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
6924 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
6925 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
6926 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
6927 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
6928 #endif
6929 
6930 #undef GEN_INT_ARITH_SUBF
6931 #undef GEN_INT_ARITH_SUBF_CONST
6932 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
6933 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
6934 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
6935                                 add_ca, compute_ca, compute_ov)               \
6936 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
6937 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
6938 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
6939 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
6940 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
6941 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
6942 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
6943 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
6944 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
6945 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
6946 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)
6947 
6948 #undef GEN_LOGICAL1
6949 #undef GEN_LOGICAL2
6950 #define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
6951 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
6952 #define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
6953 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
6954 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
6955 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
6956 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
6957 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
6958 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
6959 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
6960 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
6961 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
6962 #if defined(TARGET_PPC64)
6963 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
6964 #endif
6965 
6966 #if defined(TARGET_PPC64)
6967 #undef GEN_PPC64_R2
6968 #undef GEN_PPC64_R4
6969 #define GEN_PPC64_R2(name, opc1, opc2)                                        \
6970 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6971 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6972              PPC_64B)
6973 #define GEN_PPC64_R4(name, opc1, opc2)                                        \
6974 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
6975 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
6976              PPC_64B),                                                        \
6977 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
6978              PPC_64B),                                                        \
6979 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
6980              PPC_64B)
6981 GEN_PPC64_R4(rldicl, 0x1E, 0x00),
6982 GEN_PPC64_R4(rldicr, 0x1E, 0x02),
6983 GEN_PPC64_R4(rldic, 0x1E, 0x04),
6984 GEN_PPC64_R2(rldcl, 0x1E, 0x08),
6985 GEN_PPC64_R2(rldcr, 0x1E, 0x09),
6986 GEN_PPC64_R4(rldimi, 0x1E, 0x06),
6987 #endif
6988 
6989 #undef GEN_LDX_E
6990 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
6991 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
6992 
6993 #if defined(TARGET_PPC64)
6994 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
6995 
6996 /* HV/P7 and later only */
6997 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
6998 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
6999 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
7000 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
7001 #endif
7002 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
7003 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
7004 
7005 /* External PID based load */
7006 #undef GEN_LDEPX
7007 #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
7008 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7009               0x00000001, PPC_NONE, PPC2_BOOKE206),
7010 
7011 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
7012 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
7013 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
7014 #if defined(TARGET_PPC64)
7015 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
7016 #endif
7017 
7018 #undef GEN_STX_E
7019 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
7020 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
7021 
7022 #if defined(TARGET_PPC64)
7023 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
7024 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
7025 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
7026 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
7027 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
7028 #endif
7029 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
7030 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
7031 
7032 #undef GEN_STEPX
7033 #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
7034 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
7035               0x00000001, PPC_NONE, PPC2_BOOKE206),
7036 
7037 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
7038 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
7039 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
7040 #if defined(TARGET_PPC64)
7041 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
7042 #endif
7043 
7044 #undef GEN_CRLOGIC
7045 #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
7046 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
7047 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
7048 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
7049 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
7050 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
7051 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
7052 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
7053 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
7054 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
7055 
7056 #undef GEN_MAC_HANDLER
7057 #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
7058 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
7059 GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
7060 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
7061 GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
7062 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
7063 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
7064 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
7065 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
7066 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
7067 GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
7068 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
7069 GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
7070 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
7071 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
7072 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
7073 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
7074 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
7075 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
7076 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
7077 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
7078 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
7079 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
7080 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
7081 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
7082 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
7083 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
7084 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
7085 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
7086 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
7087 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
7088 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
7089 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
7090 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
7091 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
7092 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
7093 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
7094 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
7095 GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
7096 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
7097 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
7098 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
7099 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
7100 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
7101 
7102 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
7103                PPC_NONE, PPC2_TM),
7104 GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
7105                PPC_NONE, PPC2_TM),
7106 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
7107                PPC_NONE, PPC2_TM),
7108 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
7109                PPC_NONE, PPC2_TM),
7110 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
7111                PPC_NONE, PPC2_TM),
7112 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
7113                PPC_NONE, PPC2_TM),
7114 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
7115                PPC_NONE, PPC2_TM),
7116 GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
7117                PPC_NONE, PPC2_TM),
7118 GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
7119                PPC_NONE, PPC2_TM),
7120 GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
7121                PPC_NONE, PPC2_TM),
7122 GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
7123                PPC_NONE, PPC2_TM),
7124 
7125 #include "translate/fp-ops.c.inc"
7126 
7127 #include "translate/vmx-ops.c.inc"
7128 
7129 #include "translate/vsx-ops.c.inc"
7130 
7131 #include "translate/spe-ops.c.inc"
7132 };
7133 
7134 /*****************************************************************************/
7135 /* Opcode types */
/*
 * A dispatch-table slot holds either a direct handler pointer or a
 * pointer to a sub-table; the kind is encoded in the low bits of the
 * pointer value itself (allocations are aligned, so those bits are free).
 */
enum {
    PPC_DIRECT   = 0, /* Opcode routine        */
    PPC_INDIRECT = 1, /* Indirect opcode table */
};

/* Mask covering the pointer tag bits used for the encoding above */
#define PPC_OPCODE_MASK 0x3
7142 
7143 static inline int is_indirect_opcode(void *handler)
7144 {
7145     return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
7146 }
7147 
7148 static inline opc_handler_t **ind_table(void *handler)
7149 {
7150     return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
7151 }
7152 
7153 /* Instruction table creation */
7154 /* Opcodes tables creation */
7155 static void fill_new_table(opc_handler_t **table, int len)
7156 {
7157     int i;
7158 
7159     for (i = 0; i < len; i++) {
7160         table[i] = &invalid_handler;
7161     }
7162 }
7163 
7164 static int create_new_table(opc_handler_t **table, unsigned char idx)
7165 {
7166     opc_handler_t **tmp;
7167 
7168     tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
7169     fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
7170     table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
7171 
7172     return 0;
7173 }
7174 
7175 static int insert_in_table(opc_handler_t **table, unsigned char idx,
7176                             opc_handler_t *handler)
7177 {
7178     if (table[idx] != &invalid_handler) {
7179         return -1;
7180     }
7181     table[idx] = handler;
7182 
7183     return 0;
7184 }
7185 
7186 static int register_direct_insn(opc_handler_t **ppc_opcodes,
7187                                 unsigned char idx, opc_handler_t *handler)
7188 {
7189     if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
7190         printf("*** ERROR: opcode %02x already assigned in main "
7191                "opcode table\n", idx);
7192         return -1;
7193     }
7194 
7195     return 0;
7196 }
7197 
7198 static int register_ind_in_table(opc_handler_t **table,
7199                                  unsigned char idx1, unsigned char idx2,
7200                                  opc_handler_t *handler)
7201 {
7202     if (table[idx1] == &invalid_handler) {
7203         if (create_new_table(table, idx1) < 0) {
7204             printf("*** ERROR: unable to create indirect table "
7205                    "idx=%02x\n", idx1);
7206             return -1;
7207         }
7208     } else {
7209         if (!is_indirect_opcode(table[idx1])) {
7210             printf("*** ERROR: idx %02x already assigned to a direct "
7211                    "opcode\n", idx1);
7212             return -1;
7213         }
7214     }
7215     if (handler != NULL &&
7216         insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
7217         printf("*** ERROR: opcode %02x already assigned in "
7218                "opcode table %02x\n", idx2, idx1);
7219         return -1;
7220     }
7221 
7222     return 0;
7223 }
7224 
/*
 * Register a two-level instruction: @idx1 (opc1) selects the sub-table,
 * @idx2 (opc2) the handler slot inside it.  Returns 0 or -1.
 */
static int register_ind_insn(opc_handler_t **ppc_opcodes,
                             unsigned char idx1, unsigned char idx2,
                             opc_handler_t *handler)
{
    return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
}
7231 
7232 static int register_dblind_insn(opc_handler_t **ppc_opcodes,
7233                                 unsigned char idx1, unsigned char idx2,
7234                                 unsigned char idx3, opc_handler_t *handler)
7235 {
7236     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7237         printf("*** ERROR: unable to join indirect table idx "
7238                "[%02x-%02x]\n", idx1, idx2);
7239         return -1;
7240     }
7241     if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
7242                               handler) < 0) {
7243         printf("*** ERROR: unable to insert opcode "
7244                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7245         return -1;
7246     }
7247 
7248     return 0;
7249 }
7250 
7251 static int register_trplind_insn(opc_handler_t **ppc_opcodes,
7252                                  unsigned char idx1, unsigned char idx2,
7253                                  unsigned char idx3, unsigned char idx4,
7254                                  opc_handler_t *handler)
7255 {
7256     opc_handler_t **table;
7257 
7258     if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
7259         printf("*** ERROR: unable to join indirect table idx "
7260                "[%02x-%02x]\n", idx1, idx2);
7261         return -1;
7262     }
7263     table = ind_table(ppc_opcodes[idx1]);
7264     if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
7265         printf("*** ERROR: unable to join 2nd-level indirect table idx "
7266                "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
7267         return -1;
7268     }
7269     table = ind_table(table[idx2]);
7270     if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
7271         printf("*** ERROR: unable to insert opcode "
7272                "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
7273         return -1;
7274     }
7275     return 0;
7276 }
7277 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
7278 {
7279     if (insn->opc2 != 0xFF) {
7280         if (insn->opc3 != 0xFF) {
7281             if (insn->opc4 != 0xFF) {
7282                 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7283                                           insn->opc3, insn->opc4,
7284                                           &insn->handler) < 0) {
7285                     return -1;
7286                 }
7287             } else {
7288                 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
7289                                          insn->opc3, &insn->handler) < 0) {
7290                     return -1;
7291                 }
7292             }
7293         } else {
7294             if (register_ind_insn(ppc_opcodes, insn->opc1,
7295                                   insn->opc2, &insn->handler) < 0) {
7296                 return -1;
7297             }
7298         }
7299     } else {
7300         if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
7301             return -1;
7302         }
7303     }
7304 
7305     return 0;
7306 }
7307 
7308 static int test_opcode_table(opc_handler_t **table, int len)
7309 {
7310     int i, count, tmp;
7311 
7312     for (i = 0, count = 0; i < len; i++) {
7313         /* Consistency fixup */
7314         if (table[i] == NULL) {
7315             table[i] = &invalid_handler;
7316         }
7317         if (table[i] != &invalid_handler) {
7318             if (is_indirect_opcode(table[i])) {
7319                 tmp = test_opcode_table(ind_table(table[i]),
7320                     PPC_CPU_INDIRECT_OPCODES_LEN);
7321                 if (tmp == 0) {
7322                     free(table[i]);
7323                     table[i] = &invalid_handler;
7324                 } else {
7325                     count++;
7326                 }
7327             } else {
7328                 count++;
7329             }
7330         }
7331     }
7332 
7333     return count;
7334 }
7335 
7336 static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
7337 {
7338     if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
7339         printf("*** WARNING: no opcode defined !\n");
7340     }
7341 }
7342 
7343 /*****************************************************************************/
7344 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
7345 {
7346     PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
7347     opcode_t *opc;
7348 
7349     fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
7350     for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
7351         if (((opc->handler.type & pcc->insns_flags) != 0) ||
7352             ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
7353             if (register_insn(cpu->opcodes, opc) < 0) {
7354                 error_setg(errp, "ERROR initializing PowerPC instruction "
7355                            "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
7356                            opc->opc3);
7357                 return;
7358             }
7359         }
7360     }
7361     fix_opcode_tables(cpu->opcodes);
7362     fflush(stdout);
7363     fflush(stderr);
7364 }
7365 
7366 void destroy_ppc_opcodes(PowerPCCPU *cpu)
7367 {
7368     opc_handler_t **table, **table_2;
7369     int i, j, k;
7370 
7371     for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
7372         if (cpu->opcodes[i] == &invalid_handler) {
7373             continue;
7374         }
7375         if (is_indirect_opcode(cpu->opcodes[i])) {
7376             table = ind_table(cpu->opcodes[i]);
7377             for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
7378                 if (table[j] == &invalid_handler) {
7379                     continue;
7380                 }
7381                 if (is_indirect_opcode(table[j])) {
7382                     table_2 = ind_table(table[j]);
7383                     for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
7384                         if (table_2[k] != &invalid_handler &&
7385                             is_indirect_opcode(table_2[k])) {
7386                             g_free((opc_handler_t *)((uintptr_t)table_2[k] &
7387                                                      ~PPC_INDIRECT));
7388                         }
7389                     }
7390                     g_free((opc_handler_t *)((uintptr_t)table[j] &
7391                                              ~PPC_INDIRECT));
7392                 }
7393             }
7394             g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
7395                 ~PPC_INDIRECT));
7396         }
7397     }
7398 }
7399 
7400 int ppc_fixup_cpu(PowerPCCPU *cpu)
7401 {
7402     CPUPPCState *env = &cpu->env;
7403 
7404     /*
7405      * TCG doesn't (yet) emulate some groups of instructions that are
7406      * implemented on some otherwise supported CPUs (e.g. VSX and
7407      * decimal floating point instructions on POWER7).  We remove
7408      * unsupported instruction groups from the cpu state's instruction
7409      * masks and hope the guest can cope.  For at least the pseries
7410      * machine, the unavailability of these instructions can be
7411      * advertised to the guest via the device tree.
7412      */
7413     if ((env->insns_flags & ~PPC_TCG_INSNS)
7414         || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
7415         warn_report("Disabling some instructions which are not "
7416                     "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
7417                     env->insns_flags & ~PPC_TCG_INSNS,
7418                     env->insns_flags2 & ~PPC_TCG_INSNS2);
7419     }
7420     env->insns_flags &= PPC_TCG_INSNS;
7421     env->insns_flags2 &= PPC_TCG_INSNS2;
7422     return 0;
7423 }
7424 
7425 static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
7426 {
7427     opc_handler_t **table, *handler;
7428     uint32_t inval;
7429 
7430     ctx->opcode = insn;
7431 
7432     LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
7433               insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7434               ctx->le_mode ? "little" : "big");
7435 
7436     table = cpu->opcodes;
7437     handler = table[opc1(insn)];
7438     if (is_indirect_opcode(handler)) {
7439         table = ind_table(handler);
7440         handler = table[opc2(insn)];
7441         if (is_indirect_opcode(handler)) {
7442             table = ind_table(handler);
7443             handler = table[opc3(insn)];
7444             if (is_indirect_opcode(handler)) {
7445                 table = ind_table(handler);
7446                 handler = table[opc4(insn)];
7447             }
7448         }
7449     }
7450 
7451     /* Is opcode *REALLY* valid ? */
7452     if (unlikely(handler->handler == &gen_invalid)) {
7453         qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
7454                       "%02x - %02x - %02x - %02x (%08x) "
7455                       TARGET_FMT_lx "\n",
7456                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7457                       insn, ctx->cia);
7458         return false;
7459     }
7460 
7461     if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
7462                  && Rc(insn))) {
7463         inval = handler->inval2;
7464     } else {
7465         inval = handler->inval1;
7466     }
7467 
7468     if (unlikely((insn & inval) != 0)) {
7469         qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
7470                       "%02x - %02x - %02x - %02x (%08x) "
7471                       TARGET_FMT_lx "\n", insn & inval,
7472                       opc1(insn), opc2(insn), opc3(insn), opc4(insn),
7473                       insn, ctx->cia);
7474         return false;
7475     }
7476 
7477     handler->handler(ctx);
7478     return true;
7479 }
7480 
/*
 * Translator hook: initialize the DisasContext for a new TB, mostly by
 * unpacking the per-TB hflags word (one bit/field per translation-
 * relevant piece of CPU state) plus a few fields read from env.
 */
static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    uint32_t hflags = ctx->base.tb->flags;

    /* Privilege level, MMU indexes and endianness. */
    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model & POWERPC_MMU_64;

    /* Facility-enable bits (FP, SPE, AltiVec, VSX, TM, ...). */
    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
    ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;

    /* Single-step mode limits the TB to one instruction. */
    ctx->singlestep_enabled = 0;
    if ((hflags >> HFLAGS_SE) & 1) {
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}
7526 
static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
    /* Nothing to emit at the start of a TB for PowerPC. */
}
7530 
/* Record the guest PC of the instruction about to be translated. */
static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}
7535 
/*
 * Return true if @insn is the first word of a 64-bit prefixed
 * instruction (primary opcode 1, ISA v3.1).
 * NOTE(review): relies on REQUIRE_INSNS_FLAGS2 (defined elsewhere)
 * returning false from this function when the CPU lacks ISA310 —
 * confirm against the macro definition.
 */
static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}
7541 
/*
 * Translator hook: fetch and translate one instruction (or one 64-bit
 * prefixed instruction pair on ISA v3.1), trying the decodetree
 * decoders first and the legacy dispatch tables as a fallback.
 */
static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    /* cia = address of the instruction currently being translated. */
    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        /* Plain 32-bit instruction: decodetree, then legacy tables. */
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        /* Prefixed: fetch the suffix word and decode the 64-bit form. */
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}
7587 
7588 static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
7589 {
7590     DisasContext *ctx = container_of(dcbase, DisasContext, base);
7591     DisasJumpType is_jmp = ctx->base.is_jmp;
7592     target_ulong nip = ctx->base.pc_next;
7593 
7594     if (is_jmp == DISAS_NORETURN) {
7595         /* We have already exited the TB. */
7596         return;
7597     }
7598 
7599     /* Honor single stepping. */
7600     if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
7601         && (nip <= 0x100 || nip > 0xf00)) {
7602         switch (is_jmp) {
7603         case DISAS_TOO_MANY:
7604         case DISAS_EXIT_UPDATE:
7605         case DISAS_CHAIN_UPDATE:
7606             gen_update_nip(ctx, nip);
7607             break;
7608         case DISAS_EXIT:
7609         case DISAS_CHAIN:
7610             break;
7611         default:
7612             g_assert_not_reached();
7613         }
7614 
7615         gen_debug_exception(ctx);
7616         return;
7617     }
7618 
7619     switch (is_jmp) {
7620     case DISAS_TOO_MANY:
7621         if (use_goto_tb(ctx, nip)) {
7622             pmu_count_insns(ctx);
7623             tcg_gen_goto_tb(0);
7624             gen_update_nip(ctx, nip);
7625             tcg_gen_exit_tb(ctx->base.tb, 0);
7626             break;
7627         }
7628         /* fall through */
7629     case DISAS_CHAIN_UPDATE:
7630         gen_update_nip(ctx, nip);
7631         /* fall through */
7632     case DISAS_CHAIN:
7633         /*
7634          * tcg_gen_lookup_and_goto_ptr will exit the TB if
7635          * CF_NO_GOTO_PTR is set. Count insns now.
7636          */
7637         if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
7638             pmu_count_insns(ctx);
7639         }
7640 
7641         tcg_gen_lookup_and_goto_ptr();
7642         break;
7643 
7644     case DISAS_EXIT_UPDATE:
7645         gen_update_nip(ctx, nip);
7646         /* fall through */
7647     case DISAS_EXIT:
7648         pmu_count_insns(ctx);
7649         tcg_gen_exit_tb(NULL, 0);
7650         break;
7651 
7652     default:
7653         g_assert_not_reached();
7654     }
7655 }
7656 
/* Translator hook: dump the disassembly of the guest TB to @logfile. */
static void ppc_tr_disas_log(const DisasContextBase *dcbase,
                             CPUState *cs, FILE *logfile)
{
    fprintf(logfile, "IN: %s\n", lookup_symbol(dcbase->pc_first));
    target_disas(logfile, cs, dcbase->pc_first, dcbase->tb->size);
}
7663 
/* PowerPC hooks for the generic translator loop. */
static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};
7672 
/*
 * Entry point from the common translation code: translate the guest
 * code starting at @pc into @tb using the PowerPC translator hooks.
 */
void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns,
                           target_ulong pc, void *host_pc)
{
    DisasContext ctx;

    translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
}
7680