xref: /openbmc/qemu/target/ppc/translate.c (revision 84fb165d967d7245d2779b3a4217a08b0c312a51)
1  /*
2   *  PowerPC emulation for qemu: main translation routines.
3   *
4   *  Copyright (c) 2003-2007 Jocelyn Mayer
5   *  Copyright (C) 2011 Freescale Semiconductor, Inc.
6   *
7   * This library is free software; you can redistribute it and/or
8   * modify it under the terms of the GNU Lesser General Public
9   * License as published by the Free Software Foundation; either
10   * version 2.1 of the License, or (at your option) any later version.
11   *
12   * This library is distributed in the hope that it will be useful,
13   * but WITHOUT ANY WARRANTY; without even the implied warranty of
14   * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15   * Lesser General Public License for more details.
16   *
17   * You should have received a copy of the GNU Lesser General Public
18   * License along with this library; if not, see <http://www.gnu.org/licenses/>.
19   */
20  
21  #include "qemu/osdep.h"
22  #include "cpu.h"
23  #include "internal.h"
24  #include "exec/exec-all.h"
25  #include "tcg/tcg-op.h"
26  #include "tcg/tcg-op-gvec.h"
27  #include "qemu/host-utils.h"
28  
29  #include "exec/helper-proto.h"
30  #include "exec/helper-gen.h"
31  
32  #include "exec/translator.h"
33  #include "exec/log.h"
34  #include "qemu/atomic128.h"
35  #include "spr_common.h"
36  #include "power8-pmu.h"
37  
38  #include "qemu/qemu-print.h"
39  #include "qapi/error.h"
40  
41  #define HELPER_H "helper.h"
42  #include "exec/helper-info.c.inc"
43  #undef  HELPER_H
44  
45  #define CPU_SINGLE_STEP 0x1
46  #define CPU_BRANCH_STEP 0x2
47  
48  /* Include definitions for instructions classes and implementations flags */
49  /* #define PPC_DEBUG_DISAS */
50  
51  #ifdef PPC_DEBUG_DISAS
52  #  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
53  #else
54  #  define LOG_DISAS(...) do { } while (0)
55  #endif
56  /*****************************************************************************/
57  /* Code translation helpers                                                  */
58  
59  /* global register indexes */
60  static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
61                            + 10 * 4 + 22 * 5 /* SPE GPRh */
62                            + 8 * 5           /* CRF */];
63  static TCGv cpu_gpr[32];
64  static TCGv cpu_gprh[32];
65  static TCGv_i32 cpu_crf[8];
66  static TCGv cpu_nip;
67  static TCGv cpu_msr;
68  static TCGv cpu_ctr;
69  static TCGv cpu_lr;
70  #if defined(TARGET_PPC64)
71  static TCGv cpu_cfar;
72  #endif
73  static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
74  static TCGv cpu_reserve;
75  static TCGv cpu_reserve_length;
76  static TCGv cpu_reserve_val;
77  #if defined(TARGET_PPC64)
78  static TCGv cpu_reserve_val2;
79  #endif
80  static TCGv cpu_fpscr;
81  static TCGv_i32 cpu_access_type;
82  
83  void ppc_translate_init(void)
84  {
85      int i;
86      char *p;
87      size_t cpu_reg_names_size;
88  
89      p = cpu_reg_names;
90      cpu_reg_names_size = sizeof(cpu_reg_names);
91  
92      for (i = 0; i < 8; i++) {
93          snprintf(p, cpu_reg_names_size, "crf%d", i);
94          cpu_crf[i] = tcg_global_mem_new_i32(tcg_env,
95                                              offsetof(CPUPPCState, crf[i]), p);
96          p += 5;
97          cpu_reg_names_size -= 5;
98      }
99  
100      for (i = 0; i < 32; i++) {
101          snprintf(p, cpu_reg_names_size, "r%d", i);
102          cpu_gpr[i] = tcg_global_mem_new(tcg_env,
103                                          offsetof(CPUPPCState, gpr[i]), p);
104          p += (i < 10) ? 3 : 4;
105          cpu_reg_names_size -= (i < 10) ? 3 : 4;
106          snprintf(p, cpu_reg_names_size, "r%dH", i);
107          cpu_gprh[i] = tcg_global_mem_new(tcg_env,
108                                           offsetof(CPUPPCState, gprh[i]), p);
109          p += (i < 10) ? 4 : 5;
110          cpu_reg_names_size -= (i < 10) ? 4 : 5;
111      }
112  
113      cpu_nip = tcg_global_mem_new(tcg_env,
114                                   offsetof(CPUPPCState, nip), "nip");
115  
116      cpu_msr = tcg_global_mem_new(tcg_env,
117                                   offsetof(CPUPPCState, msr), "msr");
118  
119      cpu_ctr = tcg_global_mem_new(tcg_env,
120                                   offsetof(CPUPPCState, ctr), "ctr");
121  
122      cpu_lr = tcg_global_mem_new(tcg_env,
123                                  offsetof(CPUPPCState, lr), "lr");
124  
125  #if defined(TARGET_PPC64)
126      cpu_cfar = tcg_global_mem_new(tcg_env,
127                                    offsetof(CPUPPCState, cfar), "cfar");
128  #endif
129  
130      cpu_xer = tcg_global_mem_new(tcg_env,
131                                   offsetof(CPUPPCState, xer), "xer");
132      cpu_so = tcg_global_mem_new(tcg_env,
133                                  offsetof(CPUPPCState, so), "SO");
134      cpu_ov = tcg_global_mem_new(tcg_env,
135                                  offsetof(CPUPPCState, ov), "OV");
136      cpu_ca = tcg_global_mem_new(tcg_env,
137                                  offsetof(CPUPPCState, ca), "CA");
138      cpu_ov32 = tcg_global_mem_new(tcg_env,
139                                    offsetof(CPUPPCState, ov32), "OV32");
140      cpu_ca32 = tcg_global_mem_new(tcg_env,
141                                    offsetof(CPUPPCState, ca32), "CA32");
142  
143      cpu_reserve = tcg_global_mem_new(tcg_env,
144                                       offsetof(CPUPPCState, reserve_addr),
145                                       "reserve_addr");
146      cpu_reserve_length = tcg_global_mem_new(tcg_env,
147                                              offsetof(CPUPPCState,
148                                                       reserve_length),
149                                              "reserve_length");
150      cpu_reserve_val = tcg_global_mem_new(tcg_env,
151                                           offsetof(CPUPPCState, reserve_val),
152                                           "reserve_val");
153  #if defined(TARGET_PPC64)
154      cpu_reserve_val2 = tcg_global_mem_new(tcg_env,
155                                            offsetof(CPUPPCState, reserve_val2),
156                                            "reserve_val2");
157  #endif
158  
159      cpu_fpscr = tcg_global_mem_new(tcg_env,
160                                     offsetof(CPUPPCState, fpscr), "fpscr");
161  
162      cpu_access_type = tcg_global_mem_new_i32(tcg_env,
163                                               offsetof(CPUPPCState, access_type),
164                                               "access_type");
165  }
166  
167  /* internal defines */
168  struct DisasContext {
169      DisasContextBase base;
170      target_ulong cia;  /* current instruction address */
171      uint32_t opcode;
172      /* Routine used to access memory */
173      bool pr, hv, dr, le_mode;
174      bool lazy_tlb_flush;
175      bool need_access_type;
176      int mem_idx;
177      int access_type;
178      /* Translation flags */
179      MemOp default_tcg_memop_mask;
180  #if defined(TARGET_PPC64)
181      powerpc_excp_t excp_model;
182      bool sf_mode;
183      bool has_cfar;
184      bool has_bhrb;
185  #endif
186      bool fpu_enabled;
187      bool altivec_enabled;
188      bool vsx_enabled;
189      bool spe_enabled;
190      bool tm_enabled;
191      bool gtse;
192      bool hr;
193      bool mmcr0_pmcc0;
194      bool mmcr0_pmcc1;
195      bool mmcr0_pmcjce;
196      bool pmc_other;
197      bool pmu_insn_cnt;
198      bool bhrb_enable;
199      ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
200      int singlestep_enabled;
201      uint32_t flags;
202      uint64_t insns_flags;
203      uint64_t insns_flags2;
204  };
205  
206  #define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
207  #define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
208  #define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
209  #define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */
210  
211  /* Return true iff byteswap is needed in a scalar memop */
212  static inline bool need_byteswap(const DisasContext *ctx)
213  {
214  #if TARGET_BIG_ENDIAN
215       return ctx->le_mode;
216  #else
217       return !ctx->le_mode;
218  #endif
219  }
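/*
 * Put differently: a byteswap is needed whenever the guest's current
 * little-endian mode (ctx->le_mode) differs from the endianness this
 * target was built for (TARGET_BIG_ENDIAN).
 */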
220  
221  /* True when active word size < size of target_long.  */
222  #ifdef TARGET_PPC64
223  # define NARROW_MODE(C)  (!(C)->sf_mode)
224  #else
225  # define NARROW_MODE(C)  0
226  #endif
227  
228  struct opc_handler_t {
229      /* invalid bits for instruction 1 (Rc(opcode) == 0) */
230      uint32_t inval1;
231      /* invalid bits for instruction 2 (Rc(opcode) == 1) */
232      uint32_t inval2;
233      /* instruction type */
234      uint64_t type;
235      /* extended instruction type */
236      uint64_t type2;
237      /* handler */
238      void (*handler)(DisasContext *ctx);
239  };
240  
241  static inline bool gen_serialize(DisasContext *ctx)
242  {
243      if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
244          /* Restart with exclusive lock.  */
245          gen_helper_exit_atomic(tcg_env);
246          ctx->base.is_jmp = DISAS_NORETURN;
247          return false;
248      }
249      return true;
250  }
251  
252  #if !defined(CONFIG_USER_ONLY)
253  #if defined(TARGET_PPC64)
254  static inline bool gen_serialize_core(DisasContext *ctx)
255  {
256      if (ctx->flags & POWERPC_FLAG_SMT) {
257          return gen_serialize(ctx);
258      }
259      return true;
260  }
261  #endif
262  
263  static inline bool gen_serialize_core_lpar(DisasContext *ctx)
264  {
265  #if defined(TARGET_PPC64)
266      if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
267          return gen_serialize(ctx);
268      }
269  #endif
270      return true;
271  }
272  #endif
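/*
 * Summary of the serialization helpers above: gen_serialize() escapes to
 * exclusive (non-parallel) execution whenever the TB was translated with
 * CF_PARALLEL; gen_serialize_core() only does so when multiple SMT threads
 * share a core (POWERPC_FLAG_SMT), and gen_serialize_core_lpar() only when
 * those threads also share an LPAR (POWERPC_FLAG_SMT_1LPAR).  A false
 * return means translation of the instruction must stop so it can be
 * retried under the exclusive lock.
 */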
273  
274  /* SPR load/store helpers */
275  static inline void gen_load_spr(TCGv t, int reg)
276  {
277      tcg_gen_ld_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
278  }
279  
280  static inline void gen_store_spr(int reg, TCGv t)
281  {
282      tcg_gen_st_tl(t, tcg_env, offsetof(CPUPPCState, spr[reg]));
283  }
284  
285  static inline void gen_set_access_type(DisasContext *ctx, int access_type)
286  {
287      if (ctx->need_access_type && ctx->access_type != access_type) {
288          tcg_gen_movi_i32(cpu_access_type, access_type);
289          ctx->access_type = access_type;
290      }
291  }
292  
293  static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
294  {
295      if (NARROW_MODE(ctx)) {
296          nip = (uint32_t)nip;
297      }
298      tcg_gen_movi_tl(cpu_nip, nip);
299  }
300  
301  static void gen_exception_err_nip(DisasContext *ctx, uint32_t excp,
302                                    uint32_t error, target_ulong nip)
303  {
304      TCGv_i32 t0, t1;
305  
306      gen_update_nip(ctx, nip);
307      t0 = tcg_constant_i32(excp);
308      t1 = tcg_constant_i32(error);
309      gen_helper_raise_exception_err(tcg_env, t0, t1);
310      ctx->base.is_jmp = DISAS_NORETURN;
311  }
312  
313  static inline void gen_exception_err(DisasContext *ctx, uint32_t excp,
314                                       uint32_t error)
315  {
316      /*
317       * These are all synchronous exceptions; we set the PC back to the
318       * faulting instruction
319       */
320      gen_exception_err_nip(ctx, excp, error, ctx->cia);
321  }
322  
323  static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
324                                target_ulong nip)
325  {
326      TCGv_i32 t0;
327  
328      gen_update_nip(ctx, nip);
329      t0 = tcg_constant_i32(excp);
330      gen_helper_raise_exception(tcg_env, t0);
331      ctx->base.is_jmp = DISAS_NORETURN;
332  }
333  
334  static inline void gen_exception(DisasContext *ctx, uint32_t excp)
335  {
336      /*
337       * These are all synchronous exceptions; we set the PC back to the
338       * faulting instruction
339       */
340      gen_exception_nip(ctx, excp, ctx->cia);
341  }
342  
343  #if !defined(CONFIG_USER_ONLY)
344  static void gen_ppc_maybe_interrupt(DisasContext *ctx)
345  {
346      translator_io_start(&ctx->base);
347      gen_helper_ppc_maybe_interrupt(tcg_env);
348  }
349  #endif
350  
351  /*
352   * Tells the caller what is the appropriate exception to generate and prepares
353   * SPR registers for this exception.
354   *
355   * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
356   * POWERPC_EXCP_DEBUG (on BookE).
357   */
358  static void gen_debug_exception(DisasContext *ctx, bool rfi_type)
359  {
360  #if !defined(CONFIG_USER_ONLY)
361      if (ctx->flags & POWERPC_FLAG_DE) {
362          target_ulong dbsr = 0;
363          if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
364              dbsr = DBCR0_ICMP;
365          } else {
366              /* Must have been a branch */
367              dbsr = DBCR0_BRT;
368          }
369          TCGv t0 = tcg_temp_new();
370          gen_load_spr(t0, SPR_BOOKE_DBSR);
371          tcg_gen_ori_tl(t0, t0, dbsr);
372          gen_store_spr(SPR_BOOKE_DBSR, t0);
373          gen_helper_raise_exception(tcg_env,
374                                     tcg_constant_i32(POWERPC_EXCP_DEBUG));
375          ctx->base.is_jmp = DISAS_NORETURN;
376      } else {
377          if (!rfi_type) { /* BookS does not single step rfi type instructions */
378              TCGv t0 = tcg_temp_new();
379              tcg_gen_movi_tl(t0, ctx->cia);
380              gen_helper_book3s_trace(tcg_env, t0);
381              ctx->base.is_jmp = DISAS_NORETURN;
382          }
383      }
384  #endif
385  }
386  
387  static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
388  {
389      /* Will be converted to program check if needed */
390      gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
391  }
392  
393  static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
394  {
395      gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
396  }
397  
398  static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
399  {
400      /* Will be converted to program check if needed */
401      gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
402  }
403  
404  /*****************************************************************************/
405  /* SPR READ/WRITE CALLBACKS */
406  
407  void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
408  {
409  #if 0
410      sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
411      printf("ERROR: try to access SPR %d !\n", sprn);
412  #endif
413  }
414  
415  /* #define PPC_DUMP_SPR_ACCESSES */
416  
417  /*
418   * Generic callbacks:
419   * do nothing but store/retrieve spr value
420   */
421  static void spr_load_dump_spr(int sprn)
422  {
423  #ifdef PPC_DUMP_SPR_ACCESSES
424      TCGv_i32 t0 = tcg_constant_i32(sprn);
425      gen_helper_load_dump_spr(tcg_env, t0);
426  #endif
427  }
428  
429  void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
430  {
431      gen_load_spr(cpu_gpr[gprn], sprn);
432      spr_load_dump_spr(sprn);
433  }
434  
435  static void spr_store_dump_spr(int sprn)
436  {
437  #ifdef PPC_DUMP_SPR_ACCESSES
438      TCGv_i32 t0 = tcg_constant_i32(sprn);
439      gen_helper_store_dump_spr(tcg_env, t0);
440  #endif
441  }
442  
443  void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
444  {
445      gen_store_spr(sprn, cpu_gpr[gprn]);
446      spr_store_dump_spr(sprn);
447  }
448  
449  void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
450  {
451  #ifdef TARGET_PPC64
452      TCGv t0 = tcg_temp_new();
453      tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
454      gen_store_spr(sprn, t0);
455      spr_store_dump_spr(sprn);
456  #else
457      spr_write_generic(ctx, sprn, gprn);
458  #endif
459  }
460  
461  void spr_core_write_generic(DisasContext *ctx, int sprn, int gprn)
462  {
463      if (!(ctx->flags & POWERPC_FLAG_SMT)) {
464          spr_write_generic(ctx, sprn, gprn);
465          return;
466      }
467  
468      if (!gen_serialize(ctx)) {
469          return;
470      }
471  
472      gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn),
473                                        cpu_gpr[gprn]);
474      spr_store_dump_spr(sprn);
475  }
476  
477  void spr_core_write_generic32(DisasContext *ctx, int sprn, int gprn)
478  {
479      TCGv t0;
480  
481      if (!(ctx->flags & POWERPC_FLAG_SMT)) {
482          spr_write_generic32(ctx, sprn, gprn);
483          return;
484      }
485  
486      if (!gen_serialize(ctx)) {
487          return;
488      }
489  
490      t0 = tcg_temp_new();
491      tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
492      gen_helper_spr_core_write_generic(tcg_env, tcg_constant_i32(sprn), t0);
493      spr_store_dump_spr(sprn);
494  }
495  
496  void spr_core_lpar_write_generic(DisasContext *ctx, int sprn, int gprn)
497  {
498      if (ctx->flags & POWERPC_FLAG_SMT_1LPAR) {
499          spr_core_write_generic(ctx, sprn, gprn);
500      } else {
501          spr_write_generic(ctx, sprn, gprn);
502      }
503  }
504  
505  static void spr_write_CTRL_ST(DisasContext *ctx, int sprn, int gprn)
506  {
507      /* This does not implement >1 thread */
508      TCGv t0 = tcg_temp_new();
509      TCGv t1 = tcg_temp_new();
510      tcg_gen_extract_tl(t0, cpu_gpr[gprn], 0, 1); /* Extract RUN field */
511      tcg_gen_shli_tl(t1, t0, 8); /* Duplicate the bit in TS */
512      tcg_gen_or_tl(t1, t1, t0);
513      gen_store_spr(sprn, t1);
514  }
515  
516  void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
517  {
518      if (!(ctx->flags & POWERPC_FLAG_SMT_1LPAR)) {
519          /* CTRL behaves as 1-thread in LPAR-per-thread mode */
520          spr_write_CTRL_ST(ctx, sprn, gprn);
521          goto out;
522      }
523  
524      if (!gen_serialize(ctx)) {
525          return;
526      }
527  
528      gen_helper_spr_write_CTRL(tcg_env, tcg_constant_i32(sprn),
529                                cpu_gpr[gprn]);
530  out:
531      spr_store_dump_spr(sprn);
532  
533      /*
534       * SPR_CTRL writes must force a new translation block,
535       * allowing the PMU to calculate the run latch events with
536       * more accuracy.
537       */
538      ctx->base.is_jmp = DISAS_EXIT_UPDATE;
539  }
540  
541  #if !defined(CONFIG_USER_ONLY)
542  void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
543  {
544      TCGv t0 = tcg_temp_new();
545      TCGv t1 = tcg_temp_new();
546      gen_load_spr(t0, sprn);
547      tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
548      tcg_gen_and_tl(t0, t0, t1);
549      gen_store_spr(sprn, t0);
550  }
551  
552  void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
553  {
554  }
555  
556  #endif
557  
558  /* SPR common to all PowerPC */
559  /* XER */
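/*
 * XER is not stored as a single value: the SO, OV, CA, OV32 and CA32 bits
 * live in their own TCG globals and only the remaining bits are kept in
 * cpu_xer.  spr_read_xer() below reassembles the architected value (folding
 * in OV32/CA32 only on ISA v3.00 and later) and spr_write_xer() splits a
 * written value back into those fields.
 */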
560  void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
561  {
562      TCGv dst = cpu_gpr[gprn];
563      TCGv t0 = tcg_temp_new();
564      TCGv t1 = tcg_temp_new();
565      TCGv t2 = tcg_temp_new();
566      tcg_gen_mov_tl(dst, cpu_xer);
567      tcg_gen_shli_tl(t0, cpu_so, XER_SO);
568      tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
569      tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
570      tcg_gen_or_tl(t0, t0, t1);
571      tcg_gen_or_tl(dst, dst, t2);
572      tcg_gen_or_tl(dst, dst, t0);
573      if (is_isa300(ctx)) {
574          tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
575          tcg_gen_or_tl(dst, dst, t0);
576          tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
577          tcg_gen_or_tl(dst, dst, t0);
578      }
579  }
580  
581  void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
582  {
583      TCGv src = cpu_gpr[gprn];
584      /* Write all flags; the isa300 check happens when reading back */
585      tcg_gen_andi_tl(cpu_xer, src,
586                      ~((1u << XER_SO) |
587                        (1u << XER_OV) | (1u << XER_OV32) |
588                        (1u << XER_CA) | (1u << XER_CA32)));
589      tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
590      tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
591      tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
592      tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
593      tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
594  }
595  
596  /* LR */
597  void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
598  {
599      tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
600  }
601  
602  void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
603  {
604      tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
605  }
606  
607  #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
608  /* Debug facilities */
609  /* CFAR */
610  void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
611  {
612      tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
613  }
614  
615  void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
616  {
617      tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
618  }
619  
620  /* Breakpoint */
621  void spr_write_ciabr(DisasContext *ctx, int sprn, int gprn)
622  {
623      translator_io_start(&ctx->base);
624      gen_helper_store_ciabr(tcg_env, cpu_gpr[gprn]);
625  }
626  
627  /* Watchpoint */
628  void spr_write_dawr0(DisasContext *ctx, int sprn, int gprn)
629  {
630      translator_io_start(&ctx->base);
631      gen_helper_store_dawr0(tcg_env, cpu_gpr[gprn]);
632  }
633  
634  void spr_write_dawrx0(DisasContext *ctx, int sprn, int gprn)
635  {
636      translator_io_start(&ctx->base);
637      gen_helper_store_dawrx0(tcg_env, cpu_gpr[gprn]);
638  }
639  #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */
640  
641  /* CTR */
642  void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
643  {
644      tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
645  }
646  
647  void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
648  {
649      tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
650  }
651  
652  /* User read access to SPR */
653  /* USPRx */
654  /* UMMCRx */
655  /* UPMCx */
656  /* USIA */
657  /* UDECR */
658  void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
659  {
660      gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
661  }
662  
663  #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
664  void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
665  {
666      gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
667  }
668  #endif
669  
670  /* SPR common to all non-embedded PowerPC */
671  /* DECR */
672  #if !defined(CONFIG_USER_ONLY)
673  void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
674  {
675      translator_io_start(&ctx->base);
676      gen_helper_load_decr(cpu_gpr[gprn], tcg_env);
677  }
678  
679  void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
680  {
681      translator_io_start(&ctx->base);
682      gen_helper_store_decr(tcg_env, cpu_gpr[gprn]);
683  }
684  #endif
685  
686  /* SPR common to all non-embedded PowerPC, except 601 */
687  /* Time base */
688  void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
689  {
690      translator_io_start(&ctx->base);
691      gen_helper_load_tbl(cpu_gpr[gprn], tcg_env);
692  }
693  
694  void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
695  {
696      translator_io_start(&ctx->base);
697      gen_helper_load_tbu(cpu_gpr[gprn], tcg_env);
698  }
699  
700  void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
701  {
702      gen_helper_load_atbl(cpu_gpr[gprn], tcg_env);
703  }
704  
705  void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
706  {
707      gen_helper_load_atbu(cpu_gpr[gprn], tcg_env);
708  }
709  
710  #if !defined(CONFIG_USER_ONLY)
711  void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
712  {
713      if (!gen_serialize_core_lpar(ctx)) {
714          return;
715      }
716  
717      translator_io_start(&ctx->base);
718      gen_helper_store_tbl(tcg_env, cpu_gpr[gprn]);
719  }
720  
721  void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
722  {
723      if (!gen_serialize_core_lpar(ctx)) {
724          return;
725      }
726  
727      translator_io_start(&ctx->base);
728      gen_helper_store_tbu(tcg_env, cpu_gpr[gprn]);
729  }
730  
731  void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
732  {
733      gen_helper_store_atbl(tcg_env, cpu_gpr[gprn]);
734  }
735  
736  void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
737  {
738      gen_helper_store_atbu(tcg_env, cpu_gpr[gprn]);
739  }
740  
741  #if defined(TARGET_PPC64)
742  void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
743  {
744      translator_io_start(&ctx->base);
745      gen_helper_load_purr(cpu_gpr[gprn], tcg_env);
746  }
747  
748  void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
749  {
750      if (!gen_serialize_core_lpar(ctx)) {
751          return;
752      }
753      translator_io_start(&ctx->base);
754      gen_helper_store_purr(tcg_env, cpu_gpr[gprn]);
755  }
756  
757  /* HDECR */
758  void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
759  {
760      translator_io_start(&ctx->base);
761      gen_helper_load_hdecr(cpu_gpr[gprn], tcg_env);
762  }
763  
764  void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
765  {
766      if (!gen_serialize_core_lpar(ctx)) {
767          return;
768      }
769      translator_io_start(&ctx->base);
770      gen_helper_store_hdecr(tcg_env, cpu_gpr[gprn]);
771  }
772  
773  void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
774  {
775      translator_io_start(&ctx->base);
776      gen_helper_load_vtb(cpu_gpr[gprn], tcg_env);
777  }
778  
779  void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
780  {
781      if (!gen_serialize_core_lpar(ctx)) {
782          return;
783      }
784      translator_io_start(&ctx->base);
785      gen_helper_store_vtb(tcg_env, cpu_gpr[gprn]);
786  }
787  
788  void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
789  {
790      if (!gen_serialize_core_lpar(ctx)) {
791          return;
792      }
793      translator_io_start(&ctx->base);
794      gen_helper_store_tbu40(tcg_env, cpu_gpr[gprn]);
795  }
796  
797  #endif
798  #endif
799  
800  #if !defined(CONFIG_USER_ONLY)
801  /* IBAT0U...IBAT7U */
802  /* IBAT0L...IBAT7L */
803  void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
804  {
805      tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
806                    offsetof(CPUPPCState,
807                             IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
808  }
809  
810  void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
811  {
812      tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
813                    offsetof(CPUPPCState,
814                             IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
815  }
816  
817  void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
818  {
819      TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0U) / 2);
820      gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
821  }
822  
823  void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
824  {
825      TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4U) / 2) + 4);
826      gen_helper_store_ibatu(tcg_env, t0, cpu_gpr[gprn]);
827  }
828  
829  void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
830  {
831      TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_IBAT0L) / 2);
832      gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
833  }
834  
835  void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
836  {
837      TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_IBAT4L) / 2) + 4);
838      gen_helper_store_ibatl(tcg_env, t0, cpu_gpr[gprn]);
839  }
840  
841  /* DBAT0U...DBAT7U */
842  /* DBAT0L...DBAT7L */
843  void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
844  {
845      tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
846                    offsetof(CPUPPCState,
847                             DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
848  }
849  
850  void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
851  {
852      tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env,
853                    offsetof(CPUPPCState,
854                             DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
855  }
856  
857  void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
858  {
859      TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0U) / 2);
860      gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
861  }
862  
863  void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
864  {
865      TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4U) / 2) + 4);
866      gen_helper_store_dbatu(tcg_env, t0, cpu_gpr[gprn]);
867  }
868  
869  void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
870  {
871      TCGv_i32 t0 = tcg_constant_i32((sprn - SPR_DBAT0L) / 2);
872      gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
873  }
874  
875  void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
876  {
877      TCGv_i32 t0 = tcg_constant_i32(((sprn - SPR_DBAT4L) / 2) + 4);
878      gen_helper_store_dbatl(tcg_env, t0, cpu_gpr[gprn]);
879  }
880  
881  /* SDR1 */
882  void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
883  {
884      gen_helper_store_sdr1(tcg_env, cpu_gpr[gprn]);
885  }
886  
887  #if defined(TARGET_PPC64)
888  /* 64 bits PowerPC specific SPRs */
889  /* PIDR */
890  void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
891  {
892      gen_helper_store_pidr(tcg_env, cpu_gpr[gprn]);
893  }
894  
895  void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
896  {
897      gen_helper_store_lpidr(tcg_env, cpu_gpr[gprn]);
898  }
899  
900  void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
901  {
902      tcg_gen_ld_tl(cpu_gpr[gprn], tcg_env, offsetof(CPUPPCState, excp_prefix));
903  }
904  
905  void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
906  {
907      TCGv t0 = tcg_temp_new();
908      tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
909      tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
910  }
911  void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
912  {
913      if (!gen_serialize_core(ctx)) {
914          return;
915      }
916  
917      gen_helper_store_ptcr(tcg_env, cpu_gpr[gprn]);
918  }
919  
920  void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
921  {
922      gen_helper_store_pcr(tcg_env, cpu_gpr[gprn]);
923  }
924  
925  /* DPDES */
926  void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
927  {
928      if (!gen_serialize_core_lpar(ctx)) {
929          return;
930      }
931  
932      gen_helper_load_dpdes(cpu_gpr[gprn], tcg_env);
933  }
934  
935  void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
936  {
937      if (!gen_serialize_core_lpar(ctx)) {
938          return;
939      }
940  
941      gen_helper_store_dpdes(tcg_env, cpu_gpr[gprn]);
942  }
943  #endif
944  #endif
945  
946  /* PowerPC 40x specific registers */
947  #if !defined(CONFIG_USER_ONLY)
948  void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
949  {
950      translator_io_start(&ctx->base);
951      gen_helper_load_40x_pit(cpu_gpr[gprn], tcg_env);
952  }
953  
954  void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
955  {
956      translator_io_start(&ctx->base);
957      gen_helper_store_40x_pit(tcg_env, cpu_gpr[gprn]);
958  }
959  
960  void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
961  {
962      translator_io_start(&ctx->base);
963      gen_store_spr(sprn, cpu_gpr[gprn]);
964      gen_helper_store_40x_dbcr0(tcg_env, cpu_gpr[gprn]);
965      /* We must stop translation as we may have rebooted */
966      ctx->base.is_jmp = DISAS_EXIT_UPDATE;
967  }
968  
969  void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
970  {
971      translator_io_start(&ctx->base);
972      gen_helper_store_40x_sler(tcg_env, cpu_gpr[gprn]);
973  }
974  
975  void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
976  {
977      translator_io_start(&ctx->base);
978      gen_helper_store_40x_tcr(tcg_env, cpu_gpr[gprn]);
979  }
980  
981  void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
982  {
983      translator_io_start(&ctx->base);
984      gen_helper_store_40x_tsr(tcg_env, cpu_gpr[gprn]);
985  }
986  
987  void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
988  {
989      TCGv t0 = tcg_temp_new();
990      tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
991      gen_helper_store_40x_pid(tcg_env, t0);
992  }
993  
994  void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
995  {
996      translator_io_start(&ctx->base);
997      gen_helper_store_booke_tcr(tcg_env, cpu_gpr[gprn]);
998  }
999  
1000  void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
1001  {
1002      translator_io_start(&ctx->base);
1003      gen_helper_store_booke_tsr(tcg_env, cpu_gpr[gprn]);
1004  }
1005  #endif
1006  
1007  /* PIR */
1008  #if !defined(CONFIG_USER_ONLY)
1009  void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
1010  {
1011      TCGv t0 = tcg_temp_new();
1012      tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
1013      gen_store_spr(SPR_PIR, t0);
1014  }
1015  #endif
1016  
1017  /* SPE specific registers */
1018  void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
1019  {
1020      TCGv_i32 t0 = tcg_temp_new_i32();
1021      tcg_gen_ld_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
1022      tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
1023  }
1024  
1025  void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
1026  {
1027      TCGv_i32 t0 = tcg_temp_new_i32();
1028      tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
1029      tcg_gen_st_i32(t0, tcg_env, offsetof(CPUPPCState, spe_fscr));
1030  }
1031  
1032  #if !defined(CONFIG_USER_ONLY)
1033  /* Callback used to write the exception vector base */
1034  void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
1035  {
1036      TCGv t0 = tcg_temp_new();
1037      tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivpr_mask));
1038      tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1039      tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_prefix));
1040      gen_store_spr(sprn, t0);
1041  }
1042  
1043  void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
1044  {
1045      int sprn_offs;
1046  
1047      if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
1048          sprn_offs = sprn - SPR_BOOKE_IVOR0;
1049      } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
1050          sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
1051      } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
1052          sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
1053      } else {
1054          qemu_log_mask(LOG_GUEST_ERROR, "Trying to write an unknown exception"
1055                        " vector 0x%03x\n", sprn);
1056          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1057          return;
1058      }
1059  
1060      TCGv t0 = tcg_temp_new();
1061      tcg_gen_ld_tl(t0, tcg_env, offsetof(CPUPPCState, ivor_mask));
1062      tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
1063      tcg_gen_st_tl(t0, tcg_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
1064      gen_store_spr(sprn, t0);
1065  }
1066  #endif
1067  
1068  #ifdef TARGET_PPC64
1069  #ifndef CONFIG_USER_ONLY
1070  void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
1071  {
1072      TCGv t0 = tcg_temp_new();
1073      TCGv t1 = tcg_temp_new();
1074      TCGv t2 = tcg_temp_new();
1075  
1076      /*
1077       * Note, the HV=1 PR=0 case is handled earlier by simply using
1078       * spr_write_generic for HV mode in the SPR table
1079       */
1080  
1081      /* Build insertion mask into t1 based on context */
1082      if (ctx->pr) {
1083          gen_load_spr(t1, SPR_UAMOR);
1084      } else {
1085          gen_load_spr(t1, SPR_AMOR);
1086      }
1087  
1088      /* Mask new bits into t2 */
1089      tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1090  
1091      /* Load AMR and clear new bits in t0 */
1092      gen_load_spr(t0, SPR_AMR);
1093      tcg_gen_andc_tl(t0, t0, t1);
1094  
1095      /* Or'in new bits and write it out */
1096      tcg_gen_or_tl(t0, t0, t2);
1097      gen_store_spr(SPR_AMR, t0);
1098      spr_store_dump_spr(SPR_AMR);
1099  }
1100  
1101  void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
1102  {
1103      TCGv t0 = tcg_temp_new();
1104      TCGv t1 = tcg_temp_new();
1105      TCGv t2 = tcg_temp_new();
1106  
1107      /*
1108       * Note, the HV=1 case is handled earlier by simply using
1109       * spr_write_generic for HV mode in the SPR table
1110       */
1111  
1112      /* Build insertion mask into t1 based on context */
1113      gen_load_spr(t1, SPR_AMOR);
1114  
1115      /* Mask new bits into t2 */
1116      tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1117  
1118      /* Load AMR and clear new bits in t0 */
1119      gen_load_spr(t0, SPR_UAMOR);
1120      tcg_gen_andc_tl(t0, t0, t1);
1121  
1122      /* Or'in new bits and write it out */
1123      tcg_gen_or_tl(t0, t0, t2);
1124      gen_store_spr(SPR_UAMOR, t0);
1125      spr_store_dump_spr(SPR_UAMOR);
1126  }
1127  
1128  void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
1129  {
1130      TCGv t0 = tcg_temp_new();
1131      TCGv t1 = tcg_temp_new();
1132      TCGv t2 = tcg_temp_new();
1133  
1134      /*
1135       * Note, the HV=1 case is handled earlier by simply using
1136       * spr_write_generic for HV mode in the SPR table
1137       */
1138  
1139      /* Build insertion mask into t1 based on context */
1140      gen_load_spr(t1, SPR_AMOR);
1141  
1142      /* Mask new bits into t2 */
1143      tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);
1144  
1145      /* Load AMR and clear new bits in t0 */
1146      gen_load_spr(t0, SPR_IAMR);
1147      tcg_gen_andc_tl(t0, t0, t1);
1148  
1149      /* Or'in new bits and write it out */
1150      tcg_gen_or_tl(t0, t0, t2);
1151      gen_store_spr(SPR_IAMR, t0);
1152      spr_store_dump_spr(SPR_IAMR);
1153  }
1154  #endif
1155  #endif
1156  
1157  #ifndef CONFIG_USER_ONLY
1158  void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
1159  {
1160      gen_helper_fixup_thrm(tcg_env);
1161      gen_load_spr(cpu_gpr[gprn], sprn);
1162      spr_load_dump_spr(sprn);
1163  }
1164  #endif /* !CONFIG_USER_ONLY */
1165  
1166  #if !defined(CONFIG_USER_ONLY)
1167  void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
1168  {
1169      TCGv t0 = tcg_temp_new();
1170  
1171      tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
1172      gen_store_spr(sprn, t0);
1173  }
1174  
1175  void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
1176  {
1177      TCGv t0 = tcg_temp_new();
1178  
1179      tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
1180      gen_store_spr(sprn, t0);
1181  }
1182  
1183  void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
1184  {
1185      TCGv t0 = tcg_temp_new();
1186  
1187      tcg_gen_andi_tl(t0, cpu_gpr[gprn],
1188                      ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
1189      gen_store_spr(sprn, t0);
1190  }
1191  
1192  void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
1193  {
1194      gen_helper_booke206_tlbflush(tcg_env, cpu_gpr[gprn]);
1195  }
1196  
1197  void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
1198  {
1199      TCGv_i32 t0 = tcg_constant_i32(sprn);
1200      gen_helper_booke_setpid(tcg_env, t0, cpu_gpr[gprn]);
1201  }
1202  
1203  void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
1204  {
1205      gen_helper_booke_set_eplc(tcg_env, cpu_gpr[gprn]);
1206  }
1207  
1208  void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
1209  {
1210      gen_helper_booke_set_epsc(tcg_env, cpu_gpr[gprn]);
1211  }
1212  
1213  #endif
1214  
1215  #if !defined(CONFIG_USER_ONLY)
1216  void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
1217  {
1218      TCGv val = tcg_temp_new();
1219      tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
1220      gen_store_spr(SPR_BOOKE_MAS3, val);
1221      tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
1222      gen_store_spr(SPR_BOOKE_MAS7, val);
1223  }
1224  
1225  void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
1226  {
1227      TCGv mas7 = tcg_temp_new();
1228      TCGv mas3 = tcg_temp_new();
1229      gen_load_spr(mas7, SPR_BOOKE_MAS7);
1230      tcg_gen_shli_tl(mas7, mas7, 32);
1231      gen_load_spr(mas3, SPR_BOOKE_MAS3);
1232      tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
1233  }
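/*
 * The mas73 helpers above treat MAS3 and MAS7 as one 64-bit quantity:
 * the write splits the source GPR into MAS3 (low 32 bits) and MAS7
 * (high 32 bits), and the read reassembles them with MAS7 as the
 * upper half.
 */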
1234  
1235  #endif
1236  
1237  #ifdef TARGET_PPC64
1238  static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
1239                                      int bit, int sprn, int cause)
1240  {
1241      TCGv_i32 t1 = tcg_constant_i32(bit);
1242      TCGv_i32 t2 = tcg_constant_i32(sprn);
1243      TCGv_i32 t3 = tcg_constant_i32(cause);
1244  
1245      gen_helper_fscr_facility_check(tcg_env, t1, t2, t3);
1246  }
1247  
1248  static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
1249                                     int bit, int sprn, int cause)
1250  {
1251      TCGv_i32 t1 = tcg_constant_i32(bit);
1252      TCGv_i32 t2 = tcg_constant_i32(sprn);
1253      TCGv_i32 t3 = tcg_constant_i32(cause);
1254  
1255      gen_helper_msr_facility_check(tcg_env, t1, t2, t3);
1256  }
1257  
1258  void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
1259  {
1260      TCGv spr_up = tcg_temp_new();
1261      TCGv spr = tcg_temp_new();
1262  
1263      gen_load_spr(spr, sprn - 1);
1264      tcg_gen_shri_tl(spr_up, spr, 32);
1265      tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);
1266  }
1267  
1268  void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
1269  {
1270      TCGv spr = tcg_temp_new();
1271  
1272      gen_load_spr(spr, sprn - 1);
1273      tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
1274      gen_store_spr(sprn - 1, spr);
1275  }
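/*
 * The *_prev_upper32 helpers treat the SPR being accessed as a 32-bit
 * alias of the upper half of the 64-bit SPR registered immediately before
 * it (sprn - 1): reads shift that SPR right by 32, writes deposit the new
 * value into its bits 32..63.
 */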
1276  
1277  #if !defined(CONFIG_USER_ONLY)
1278  void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
1279  {
1280      TCGv hmer = tcg_temp_new();
1281  
1282      gen_load_spr(hmer, sprn);
1283      tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
1284      gen_store_spr(sprn, hmer);
1285      spr_store_dump_spr(sprn);
1286  }
1287  
1288  void spr_read_tfmr(DisasContext *ctx, int gprn, int sprn)
1289  {
1290      /* Reading TFMR can cause it to be updated, so serialize threads here too */
1291      if (!gen_serialize_core(ctx)) {
1292          return;
1293      }
1294      gen_helper_load_tfmr(cpu_gpr[gprn], tcg_env);
1295  }
1296  
1297  void spr_write_tfmr(DisasContext *ctx, int sprn, int gprn)
1298  {
1299      if (!gen_serialize_core(ctx)) {
1300          return;
1301      }
1302      gen_helper_store_tfmr(tcg_env, cpu_gpr[gprn]);
1303  }
1304  
1305  void spr_write_sprc(DisasContext *ctx, int sprn, int gprn)
1306  {
1307      gen_helper_store_sprc(tcg_env, cpu_gpr[gprn]);
1308  }
1309  
1310  void spr_read_sprd(DisasContext *ctx, int gprn, int sprn)
1311  {
1312      gen_helper_load_sprd(cpu_gpr[gprn], tcg_env);
1313  }
1314  
1315  void spr_write_sprd(DisasContext *ctx, int sprn, int gprn)
1316  {
1317      if (!gen_serialize_core(ctx)) {
1318          return;
1319      }
1320      gen_helper_store_sprd(tcg_env, cpu_gpr[gprn]);
1321  }
1322  
1323  void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
1324  {
1325      translator_io_start(&ctx->base);
1326      gen_helper_store_lpcr(tcg_env, cpu_gpr[gprn]);
1327  }
1328  #endif /* !defined(CONFIG_USER_ONLY) */
1329  
1330  void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
1331  {
1332      gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1333      spr_read_generic(ctx, gprn, sprn);
1334  }
1335  
1336  void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
1337  {
1338      gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
1339      spr_write_generic(ctx, sprn, gprn);
1340  }
1341  
1342  void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
1343  {
1344      gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1345      spr_read_generic(ctx, gprn, sprn);
1346  }
1347  
1348  void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
1349  {
1350      gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1351      spr_write_generic(ctx, sprn, gprn);
1352  }
1353  
1354  void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
1355  {
1356      gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1357      spr_read_prev_upper32(ctx, gprn, sprn);
1358  }
1359  
1360  void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
1361  {
1362      gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
1363      spr_write_prev_upper32(ctx, sprn, gprn);
1364  }
1365  
1366  void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
1367  {
1368      gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1369      spr_read_generic(ctx, gprn, sprn);
1370  }
1371  
1372  void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
1373  {
1374      gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1375      spr_write_generic(ctx, sprn, gprn);
1376  }
1377  
1378  void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
1379  {
1380      gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1381      spr_read_prev_upper32(ctx, gprn, sprn);
1382  }
1383  
1384  void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
1385  {
1386      gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
1387      spr_write_prev_upper32(ctx, sprn, gprn);
1388  }
1389  
1390  void spr_read_dexcr_ureg(DisasContext *ctx, int gprn, int sprn)
1391  {
1392      TCGv t0 = tcg_temp_new();
1393  
1394      /*
1395       * Access to the (H)DEXCR in problem state is done using separate
1396       * SPR indexes which are 16 below the SPR indexes which have full
1397       * access to the (H)DEXCR in privileged state. Problem state can
1398       * only read bits 32:63, bits 0:31 return 0.
1399       *
1400       * See section 9.3.1-9.3.2 of PowerISA v3.1B
1401       */
1402  
1403      gen_load_spr(t0, sprn + 16);
1404      tcg_gen_ext32u_tl(cpu_gpr[gprn], t0);
1405  }
1406  
1407  /* The PPR32 SPR accesses the upper 32-bits of PPR */
1408  void spr_read_ppr32(DisasContext *ctx, int gprn, int sprn)
1409  {
1410      gen_load_spr(cpu_gpr[gprn], SPR_PPR);
1411      tcg_gen_shri_tl(cpu_gpr[gprn], cpu_gpr[gprn], 32);
1412      spr_load_dump_spr(SPR_PPR);
1413  }
1414  
1415  void spr_write_ppr32(DisasContext *ctx, int sprn, int gprn)
1416  {
1417      TCGv t0 = tcg_temp_new();
1418  
1419      /*
1420       * Don't clobber the low 32-bits of the PPR. These are all reserved bits
1421       * but TCG does implement them, so it would be surprising to zero them
1422       * here. "Priority nops" are similarly careful not to clobber reserved
1423       * bits.
1424       */
1425      gen_load_spr(t0, SPR_PPR);
1426      tcg_gen_deposit_tl(t0, t0, cpu_gpr[gprn], 32, 32);
1427      gen_store_spr(SPR_PPR, t0);
1428      spr_store_dump_spr(SPR_PPR);
1429  }
1430  #endif
1431  
1432  #define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
1433  GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)
1434  
1435  #define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
1436  GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)
1437  
1438  #define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
1439  GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)
1440  
1441  #define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
1442  GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)
1443  
1444  #define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
1445  GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)
1446  
1447  #define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
1448  GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)
1449  
1450  typedef struct opcode_t {
1451      unsigned char opc1, opc2, opc3, opc4;
1452  #if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
1453      unsigned char pad[4];
1454  #endif
1455      opc_handler_t handler;
1456      const char *oname;
1457  } opcode_t;
1458  
1459  static void gen_priv_opc(DisasContext *ctx)
1460  {
1461      gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);
1462  }
1463  
1464  /* Helpers for priv. check */
1465  #define GEN_PRIV(CTX)              \
1466      do {                           \
1467          gen_priv_opc(CTX); return; \
1468      } while (0)
1469  
1470  #if defined(CONFIG_USER_ONLY)
1471  #define CHK_HV(CTX) GEN_PRIV(CTX)
1472  #define CHK_SV(CTX) GEN_PRIV(CTX)
1473  #define CHK_HVRM(CTX) GEN_PRIV(CTX)
1474  #else
1475  #define CHK_HV(CTX)                         \
1476      do {                                    \
1477          if (unlikely(ctx->pr || !ctx->hv)) {\
1478              GEN_PRIV(CTX);                  \
1479          }                                   \
1480      } while (0)
1481  #define CHK_SV(CTX)              \
1482      do {                         \
1483          if (unlikely(ctx->pr)) { \
1484              GEN_PRIV(CTX);       \
1485          }                        \
1486      } while (0)
1487  #define CHK_HVRM(CTX)                                   \
1488      do {                                                \
1489          if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
1490              GEN_PRIV(CTX);                              \
1491          }                                               \
1492      } while (0)
1493  #endif
1494  
1495  #define CHK_NONE(CTX)
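/*
 * Privilege-check helpers: CHK_SV() raises a privileged-instruction program
 * exception when in problem state (ctx->pr), CHK_HV() additionally requires
 * hypervisor state (ctx->hv), and CHK_HVRM() further requires data
 * translation to be off (!ctx->dr, i.e. hypervisor real mode).  CHK_NONE()
 * performs no check.  In user-only builds all three checks map straight to
 * GEN_PRIV.
 */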
1496  
1497  /*****************************************************************************/
1498  /* PowerPC instructions table                                                */
1499  
1500  #define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
1501  {                                                                             \
1502      .opc1 = op1,                                                              \
1503      .opc2 = op2,                                                              \
1504      .opc3 = op3,                                                              \
1505      .opc4 = 0xff,                                                             \
1506      .handler = {                                                              \
1507          .inval1  = invl,                                                      \
1508          .type = _typ,                                                         \
1509          .type2 = _typ2,                                                       \
1510          .handler = &gen_##name,                                               \
1511      },                                                                        \
1512      .oname = stringify(name),                                                 \
1513  }
1514  #define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
1515  {                                                                             \
1516      .opc1 = op1,                                                              \
1517      .opc2 = op2,                                                              \
1518      .opc3 = op3,                                                              \
1519      .opc4 = 0xff,                                                             \
1520      .handler = {                                                              \
1521          .inval1  = invl1,                                                     \
1522          .inval2  = invl2,                                                     \
1523          .type = _typ,                                                         \
1524          .type2 = _typ2,                                                       \
1525          .handler = &gen_##name,                                               \
1526      },                                                                        \
1527      .oname = stringify(name),                                                 \
1528  }
1529  #define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
1530  {                                                                             \
1531      .opc1 = op1,                                                              \
1532      .opc2 = op2,                                                              \
1533      .opc3 = op3,                                                              \
1534      .opc4 = 0xff,                                                             \
1535      .handler = {                                                              \
1536          .inval1  = invl,                                                      \
1537          .type = _typ,                                                         \
1538          .type2 = _typ2,                                                       \
1539          .handler = &gen_##name,                                               \
1540      },                                                                        \
1541      .oname = onam,                                                            \
1542  }
1543  #define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
1544  {                                                                             \
1545      .opc1 = op1,                                                              \
1546      .opc2 = op2,                                                              \
1547      .opc3 = op3,                                                              \
1548      .opc4 = op4,                                                              \
1549      .handler = {                                                              \
1550          .inval1  = invl,                                                      \
1551          .type = _typ,                                                         \
1552          .type2 = _typ2,                                                       \
1553          .handler = &gen_##name,                                               \
1554      },                                                                        \
1555      .oname = stringify(name),                                                 \
1556  }
1557  #define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
1558  {                                                                             \
1559      .opc1 = op1,                                                              \
1560      .opc2 = op2,                                                              \
1561      .opc3 = op3,                                                              \
1562      .opc4 = op4,                                                              \
1563      .handler = {                                                              \
1564          .inval1  = invl,                                                      \
1565          .type = _typ,                                                         \
1566          .type2 = _typ2,                                                       \
1567          .handler = &gen_##name,                                               \
1568      },                                                                        \
1569      .oname = onam,                                                            \
1570  }
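/*
 * Each GEN_OPCODE* variant above builds an opcode_t table entry: opc1..opc4
 * place the instruction in the (up to four level) opcode tables, inval marks
 * encoding bits that must be zero for the instruction to be valid, and
 * .handler points at the gen_<name> translator.  The GEN_HANDLER* wrappers
 * earlier in the file are shorthands that pick the right variant.
 */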
1571  
1572  /* Invalid instruction */
1573  static void gen_invalid(DisasContext *ctx)
1574  {
1575      gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
1576  }
1577  
1578  static opc_handler_t invalid_handler = {
1579      .inval1  = 0xFFFFFFFF,
1580      .inval2  = 0xFFFFFFFF,
1581      .type    = PPC_NONE,
1582      .type2   = PPC_NONE,
1583      .handler = gen_invalid,
1584  };
1585  
1586  /***                           Integer comparison                          ***/
1587  
1588  static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
1589  {
1590      TCGv t0 = tcg_temp_new();
1591      TCGv_i32 t = tcg_temp_new_i32();
1592  
1593      tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
1594                         t0, arg0, arg1,
1595                         tcg_constant_tl(CRF_LT), tcg_constant_tl(CRF_EQ));
1596      tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
1597                         t0, arg0, arg1, tcg_constant_tl(CRF_GT), t0);
1598  
1599      tcg_gen_trunc_tl_i32(t, t0);
1600      tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
1601      tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);
1602  }
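/*
 * The two movcond ops above build the packed CR field directly: t0 starts
 * as CRF_LT or CRF_EQ depending on arg0 < arg1, then is overridden with
 * CRF_GT when arg0 > arg1.  The summary-overflow copy in cpu_so is OR-ed in
 * last, giving the architected LT/GT/EQ/SO layout of a CR field.
 */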
1603  
1604  static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
1605  {
1606      TCGv t0 = tcg_constant_tl(arg1);
1607      gen_op_cmp(arg0, t0, s, crf);
1608  }
1609  
1610  static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
1611  {
1612      TCGv t0, t1;
1613      t0 = tcg_temp_new();
1614      t1 = tcg_temp_new();
1615      if (s) {
1616          tcg_gen_ext32s_tl(t0, arg0);
1617          tcg_gen_ext32s_tl(t1, arg1);
1618      } else {
1619          tcg_gen_ext32u_tl(t0, arg0);
1620          tcg_gen_ext32u_tl(t1, arg1);
1621      }
1622      gen_op_cmp(t0, t1, s, crf);
1623  }
1624  
1625  static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
1626  {
1627      TCGv t0 = tcg_constant_tl(arg1);
1628      gen_op_cmp32(arg0, t0, s, crf);
1629  }
1630  
1631  static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
1632  {
1633      if (NARROW_MODE(ctx)) {
1634          gen_op_cmpi32(reg, 0, 1, 0);
1635      } else {
1636          gen_op_cmpi(reg, 0, 1, 0);
1637      }
1638  }
1639  
1640  /***                           Integer arithmetic                          ***/
1641  
1642  static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
1643                                             TCGv arg1, TCGv arg2, int sub)
1644  {
1645      TCGv t0 = tcg_temp_new();
1646  
1647      tcg_gen_xor_tl(cpu_ov, arg0, arg2);
1648      tcg_gen_xor_tl(t0, arg1, arg2);
1649      if (sub) {
1650          tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
1651      } else {
1652          tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
1653      }
1654      if (NARROW_MODE(ctx)) {
1655          tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
1656          if (is_isa300(ctx)) {
1657              tcg_gen_mov_tl(cpu_ov32, cpu_ov);
1658          }
1659      } else {
1660          if (is_isa300(ctx)) {
1661              tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
1662          }
1663          tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
1664      }
1665      tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1666  }
1667  
1668  static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
1669                                               TCGv res, TCGv arg0, TCGv arg1,
1670                                               TCGv ca32, int sub)
1671  {
1672      TCGv t0;
1673  
1674      if (!is_isa300(ctx)) {
1675          return;
1676      }
1677  
1678      t0 = tcg_temp_new();
1679      if (sub) {
1680          tcg_gen_eqv_tl(t0, arg0, arg1);
1681      } else {
1682          tcg_gen_xor_tl(t0, arg0, arg1);
1683      }
1684      tcg_gen_xor_tl(t0, t0, res);
1685      tcg_gen_extract_tl(ca32, t0, 32, 1);
1686  }
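/*
 * CA32 is recovered without redoing a 32-bit addition: for an add,
 * (arg0 ^ arg1 ^ res) has bit 32 set exactly when there was a carry out of
 * bit 31.  For a subtract the same identity holds with the subtrahend
 * complemented, which is what the eqv (i.e. ~arg0 ^ arg1) provides.
 */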
1687  
1688  /* Common add function */
1689  static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
1690                                      TCGv arg2, TCGv ca, TCGv ca32,
1691                                      bool add_ca, bool compute_ca,
1692                                      bool compute_ov, bool compute_rc0)
1693  {
1694      TCGv t0 = ret;
1695  
1696      if (compute_ca || compute_ov) {
1697          t0 = tcg_temp_new();
1698      }
1699  
1700      if (compute_ca) {
1701          if (NARROW_MODE(ctx)) {
1702              /*
1703               * Caution: a non-obvious corner case of the spec is that
1704               * we must produce the *entire* 64-bit addition, but
1705               * produce the carry into bit 32.
1706               */
1707              TCGv t1 = tcg_temp_new();
1708              tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
1709              tcg_gen_add_tl(t0, arg1, arg2);
1710              if (add_ca) {
1711                  tcg_gen_add_tl(t0, t0, ca);
1712              }
1713              tcg_gen_xor_tl(ca, t0, t1);        /* bits changed w/ carry */
1714              tcg_gen_extract_tl(ca, ca, 32, 1);
1715              if (is_isa300(ctx)) {
1716                  tcg_gen_mov_tl(ca32, ca);
1717              }
1718          } else {
1719              TCGv zero = tcg_constant_tl(0);
1720              if (add_ca) {
1721                  tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
1722                  tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
1723              } else {
1724                  tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
1725              }
1726              gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
1727          }
1728      } else {
1729          tcg_gen_add_tl(t0, arg1, arg2);
1730          if (add_ca) {
1731              tcg_gen_add_tl(t0, t0, ca);
1732          }
1733      }
1734  
1735      if (compute_ov) {
1736          gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
1737      }
1738      if (unlikely(compute_rc0)) {
1739          gen_set_Rc0(ctx, t0);
1740      }
1741  
1742      if (t0 != ret) {
1743          tcg_gen_mov_tl(ret, t0);
1744      }
1745  }
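/*
 * In the wide (64-bit mode) case above the carry is obtained from
 * tcg_gen_add2_tl: with zero high inputs, the high half of the double-width
 * result, written into ca, is precisely the carry out; the add_ca variant
 * chains two add2 ops to fold the incoming carry.  In narrow mode the full
 * addition is still performed and the carry is extracted from bit 32, as the
 * comment inside the function explains.
 */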
1746  
1747  static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret,
1748                                       TCGv arg1, TCGv arg2, bool sign,
1749                                       bool compute_ov, bool compute_rc0)
1750  {
1751      TCGv_i32 t0 = tcg_temp_new_i32();
1752      TCGv_i32 t1 = tcg_temp_new_i32();
1753      TCGv_i32 t2 = tcg_temp_new_i32();
1754      TCGv_i32 t3 = tcg_temp_new_i32();
1755  
1756      tcg_gen_trunc_tl_i32(t0, arg1);
1757      tcg_gen_trunc_tl_i32(t1, arg2);
1758      if (sign) {
1759          tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1760          tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1761          tcg_gen_and_i32(t2, t2, t3);
1762          tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1763          tcg_gen_or_i32(t2, t2, t3);
1764          tcg_gen_movi_i32(t3, 0);
1765          tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1766          tcg_gen_div_i32(t3, t0, t1);
1767          tcg_gen_extu_i32_tl(ret, t3);
1768      } else {
1769          tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
1770          tcg_gen_movi_i32(t3, 0);
1771          tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1772          tcg_gen_divu_i32(t3, t0, t1);
1773          tcg_gen_extu_i32_tl(ret, t3);
1774      }
1775      if (compute_ov) {
1776          tcg_gen_extu_i32_tl(cpu_ov, t2);
1777          if (is_isa300(ctx)) {
1778              tcg_gen_extu_i32_tl(cpu_ov32, t2);
1779          }
1780          tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1781      }
1782  
1783      if (unlikely(compute_rc0)) {
1784          gen_set_Rc0(ctx, ret);
1785      }
1786  }
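/*
 * Division by zero, and INT_MIN / -1 for the signed case, are flagged in t2
 * and the movcond swaps in a safe divisor (1) so the host division op cannot
 * trap; the quotient written back in those cases is undefined per the
 * architecture.  When compute_ov is set, OV (and OV32 on ISA 3.00) are taken
 * from the same flag.
 */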
1787  
1788  #if defined(TARGET_PPC64)
1789  static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret,
1790                                       TCGv arg1, TCGv arg2, bool sign,
1791                                       bool compute_ov, bool compute_rc0)
1792  {
1793      TCGv_i64 t0 = tcg_temp_new_i64();
1794      TCGv_i64 t1 = tcg_temp_new_i64();
1795      TCGv_i64 t2 = tcg_temp_new_i64();
1796      TCGv_i64 t3 = tcg_temp_new_i64();
1797  
1798      tcg_gen_mov_i64(t0, arg1);
1799      tcg_gen_mov_i64(t1, arg2);
1800      if (sign) {
1801          tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1802          tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1803          tcg_gen_and_i64(t2, t2, t3);
1804          tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1805          tcg_gen_or_i64(t2, t2, t3);
1806          tcg_gen_movi_i64(t3, 0);
1807          tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1808          tcg_gen_div_i64(ret, t0, t1);
1809      } else {
1810          tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
1811          tcg_gen_movi_i64(t3, 0);
1812          tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1813          tcg_gen_divu_i64(ret, t0, t1);
1814      }
1815      if (compute_ov) {
1816          tcg_gen_mov_tl(cpu_ov, t2);
1817          if (is_isa300(ctx)) {
1818              tcg_gen_mov_tl(cpu_ov32, t2);
1819          }
1820          tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
1821      }
1822  
1823      if (unlikely(compute_rc0)) {
1824          gen_set_Rc0(ctx, ret);
1825      }
1826  }
1827  #endif
1828  
1829  static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
1830                                       TCGv arg2, int sign)
1831  {
1832      TCGv_i32 t0 = tcg_temp_new_i32();
1833      TCGv_i32 t1 = tcg_temp_new_i32();
1834  
1835      tcg_gen_trunc_tl_i32(t0, arg1);
1836      tcg_gen_trunc_tl_i32(t1, arg2);
1837      if (sign) {
1838          TCGv_i32 t2 = tcg_temp_new_i32();
1839          TCGv_i32 t3 = tcg_temp_new_i32();
1840          tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
1841          tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
1842          tcg_gen_and_i32(t2, t2, t3);
1843          tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
1844          tcg_gen_or_i32(t2, t2, t3);
1845          tcg_gen_movi_i32(t3, 0);
1846          tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
1847          tcg_gen_rem_i32(t3, t0, t1);
1848          tcg_gen_ext_i32_tl(ret, t3);
1849      } else {
1850          TCGv_i32 t2 = tcg_constant_i32(1);
1851          TCGv_i32 t3 = tcg_constant_i32(0);
1852          tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
1853          tcg_gen_remu_i32(t0, t0, t1);
1854          tcg_gen_extu_i32_tl(ret, t0);
1855      }
1856  }
1857  
1858  #if defined(TARGET_PPC64)
1859  static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
1860                                       TCGv arg2, int sign)
1861  {
1862      TCGv_i64 t0 = tcg_temp_new_i64();
1863      TCGv_i64 t1 = tcg_temp_new_i64();
1864  
1865      tcg_gen_mov_i64(t0, arg1);
1866      tcg_gen_mov_i64(t1, arg2);
1867      if (sign) {
1868          TCGv_i64 t2 = tcg_temp_new_i64();
1869          TCGv_i64 t3 = tcg_temp_new_i64();
1870          tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
1871          tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
1872          tcg_gen_and_i64(t2, t2, t3);
1873          tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
1874          tcg_gen_or_i64(t2, t2, t3);
1875          tcg_gen_movi_i64(t3, 0);
1876          tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
1877          tcg_gen_rem_i64(ret, t0, t1);
1878      } else {
1879          TCGv_i64 t2 = tcg_constant_i64(1);
1880          TCGv_i64 t3 = tcg_constant_i64(0);
1881          tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
1882          tcg_gen_remu_i64(ret, t0, t1);
1883      }
1884  }
1885  #endif
1886  
1887  /* Common subf function */
1888  static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
1889                                       TCGv arg2, bool add_ca, bool compute_ca,
1890                                       bool compute_ov, bool compute_rc0)
1891  {
1892      TCGv t0 = ret;
1893  
1894      if (compute_ca || compute_ov) {
1895          t0 = tcg_temp_new();
1896      }
1897  
1898      if (compute_ca) {
1899          /* dest = ~arg1 + arg2 [+ ca].  */
1900          if (NARROW_MODE(ctx)) {
1901              /*
1902               * Caution: a non-obvious corner case of the spec is that
1903               * we must produce the *entire* 64-bit addition, but
1904               * produce the carry into bit 32.
1905               */
1906              TCGv inv1 = tcg_temp_new();
1907              TCGv t1 = tcg_temp_new();
1908              tcg_gen_not_tl(inv1, arg1);
1909              if (add_ca) {
1910                  tcg_gen_add_tl(t0, arg2, cpu_ca);
1911              } else {
1912                  tcg_gen_addi_tl(t0, arg2, 1);
1913              }
1914              tcg_gen_xor_tl(t1, arg2, inv1);         /* add without carry */
1915              tcg_gen_add_tl(t0, t0, inv1);
1916              tcg_gen_xor_tl(cpu_ca, t0, t1);         /* bits changed w/ carry */
1917              tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1);
1918              if (is_isa300(ctx)) {
1919                  tcg_gen_mov_tl(cpu_ca32, cpu_ca);
1920              }
1921          } else if (add_ca) {
1922              TCGv zero, inv1 = tcg_temp_new();
1923              tcg_gen_not_tl(inv1, arg1);
1924              zero = tcg_constant_tl(0);
1925              tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero);
1926              tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero);
1927              gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0);
1928          } else {
1929              tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1);
1930              tcg_gen_sub_tl(t0, arg2, arg1);
1931              gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1);
1932          }
1933      } else if (add_ca) {
1934          /*
1935           * Since we're ignoring carry-out, we can simplify the
1936           * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1.
1937           */
1938          tcg_gen_sub_tl(t0, arg2, arg1);
1939          tcg_gen_add_tl(t0, t0, cpu_ca);
1940          tcg_gen_subi_tl(t0, t0, 1);
1941      } else {
1942          tcg_gen_sub_tl(t0, arg2, arg1);
1943      }
1944  
1945      if (compute_ov) {
1946          gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1);
1947      }
1948      if (unlikely(compute_rc0)) {
1949          gen_set_Rc0(ctx, t0);
1950      }
1951  
1952      if (t0 != ret) {
1953          tcg_gen_mov_tl(ret, t0);
1954      }
1955  }
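/*
 * subf computes arg2 - arg1 as ~arg1 + arg2 + 1, which is why the carry
 * paths above operate on the complement of arg1.  In the simple compute_ca
 * case this collapses to CA = (arg2 >= arg1 unsigned), which is exactly the
 * setcond used.
 */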
1956  
1957  /***                            Integer logical                            ***/
1958  
1959  #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
1960  static void gen_pause(DisasContext *ctx)
1961  {
1962      TCGv_i32 t0 = tcg_constant_i32(0);
1963      tcg_gen_st_i32(t0, tcg_env,
1964                     -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
1965  
1966      /* Stop translation; this gives other CPUs a chance to run */
1967      gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
1968  }
1969  #endif /* defined(TARGET_PPC64) */
1970  
1971  /***                             Integer rotate                            ***/
1972  
1973  /* rlwimi & rlwimi. */
1974  static void gen_rlwimi(DisasContext *ctx)
1975  {
1976      TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
1977      TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
1978      uint32_t sh = SH(ctx->opcode);
1979      uint32_t mb = MB(ctx->opcode);
1980      uint32_t me = ME(ctx->opcode);
1981  
1982      if (sh == (31 - me) && mb <= me) {
1983          tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
1984      } else {
1985          target_ulong mask;
1986          bool mask_in_32b = true;
1987          TCGv t1;
1988  
1989  #if defined(TARGET_PPC64)
1990          mb += 32;
1991          me += 32;
1992  #endif
1993          mask = MASK(mb, me);
1994  
1995  #if defined(TARGET_PPC64)
1996          if (mask > 0xffffffffu) {
1997              mask_in_32b = false;
1998          }
1999  #endif
2000          t1 = tcg_temp_new();
2001          if (mask_in_32b) {
2002              TCGv_i32 t0 = tcg_temp_new_i32();
2003              tcg_gen_trunc_tl_i32(t0, t_rs);
2004              tcg_gen_rotli_i32(t0, t0, sh);
2005              tcg_gen_extu_i32_tl(t1, t0);
2006          } else {
2007  #if defined(TARGET_PPC64)
2008              tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
2009              tcg_gen_rotli_i64(t1, t1, sh);
2010  #else
2011              g_assert_not_reached();
2012  #endif
2013          }
2014  
2015          tcg_gen_andi_tl(t1, t1, mask);
2016          tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2017          tcg_gen_or_tl(t_ra, t_ra, t1);
2018      }
2019      if (unlikely(Rc(ctx->opcode) != 0)) {
2020          gen_set_Rc0(ctx, t_ra);
2021      }
2022  }
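/*
 * Fast path: when the inserted field is contiguous and its low edge lands
 * exactly at the rotate amount (sh == 31 - me), rlwimi is a plain bitfield
 * deposit of rS into rA.  The general path rotates the low word of rS
 * (replicated into the high half on 64-bit targets so the 32-bit rotation
 * wraps correctly) and merges it into rA under MASK(mb, me).
 */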
2023  
2024  /* rlwinm & rlwinm. */
2025  static void gen_rlwinm(DisasContext *ctx)
2026  {
2027      TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2028      TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2029      int sh = SH(ctx->opcode);
2030      int mb = MB(ctx->opcode);
2031      int me = ME(ctx->opcode);
2032      int len = me - mb + 1;
2033      int rsh = (32 - sh) & 31;
2034  
2035      if (sh != 0 && len > 0 && me == (31 - sh)) {
2036          tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2037      } else if (me == 31 && rsh + len <= 32) {
2038          tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2039      } else {
2040          target_ulong mask;
2041          bool mask_in_32b = true;
2042  #if defined(TARGET_PPC64)
2043          mb += 32;
2044          me += 32;
2045  #endif
2046          mask = MASK(mb, me);
2047  #if defined(TARGET_PPC64)
2048          if (mask > 0xffffffffu) {
2049              mask_in_32b = false;
2050          }
2051  #endif
2052          if (mask_in_32b) {
2053              if (sh == 0) {
2054                  tcg_gen_andi_tl(t_ra, t_rs, mask);
2055              } else {
2056                  TCGv_i32 t0 = tcg_temp_new_i32();
2057                  tcg_gen_trunc_tl_i32(t0, t_rs);
2058                  tcg_gen_rotli_i32(t0, t0, sh);
2059                  tcg_gen_andi_i32(t0, t0, mask);
2060                  tcg_gen_extu_i32_tl(t_ra, t0);
2061              }
2062          } else {
2063  #if defined(TARGET_PPC64)
2064              tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2065              tcg_gen_rotli_i64(t_ra, t_ra, sh);
2066              tcg_gen_andi_i64(t_ra, t_ra, mask);
2067  #else
2068              g_assert_not_reached();
2069  #endif
2070          }
2071      }
2072      if (unlikely(Rc(ctx->opcode) != 0)) {
2073          gen_set_Rc0(ctx, t_ra);
2074      }
2075  }
2076  
2077  /* rlwnm & rlwnm. */
2078  static void gen_rlwnm(DisasContext *ctx)
2079  {
2080      TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2081      TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2082      TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2083      uint32_t mb = MB(ctx->opcode);
2084      uint32_t me = ME(ctx->opcode);
2085      target_ulong mask;
2086      bool mask_in_32b = true;
2087  
2088  #if defined(TARGET_PPC64)
2089      mb += 32;
2090      me += 32;
2091  #endif
2092      mask = MASK(mb, me);
2093  
2094  #if defined(TARGET_PPC64)
2095      if (mask > 0xffffffffu) {
2096          mask_in_32b = false;
2097      }
2098  #endif
2099      if (mask_in_32b) {
2100          TCGv_i32 t0 = tcg_temp_new_i32();
2101          TCGv_i32 t1 = tcg_temp_new_i32();
2102          tcg_gen_trunc_tl_i32(t0, t_rb);
2103          tcg_gen_trunc_tl_i32(t1, t_rs);
2104          tcg_gen_andi_i32(t0, t0, 0x1f);
2105          tcg_gen_rotl_i32(t1, t1, t0);
2106          tcg_gen_extu_i32_tl(t_ra, t1);
2107      } else {
2108  #if defined(TARGET_PPC64)
2109          TCGv_i64 t0 = tcg_temp_new_i64();
2110          tcg_gen_andi_i64(t0, t_rb, 0x1f);
2111          tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
2112          tcg_gen_rotl_i64(t_ra, t_ra, t0);
2113  #else
2114          g_assert_not_reached();
2115  #endif
2116      }
2117  
2118      tcg_gen_andi_tl(t_ra, t_ra, mask);
2119  
2120      if (unlikely(Rc(ctx->opcode) != 0)) {
2121          gen_set_Rc0(ctx, t_ra);
2122      }
2123  }
2124  
2125  #if defined(TARGET_PPC64)
2126  #define GEN_PPC64_R2(name, opc1, opc2)                                        \
2127  static void glue(gen_, name##0)(DisasContext *ctx)                            \
2128  {                                                                             \
2129      gen_##name(ctx, 0);                                                       \
2130  }                                                                             \
2131                                                                                \
2132  static void glue(gen_, name##1)(DisasContext *ctx)                            \
2133  {                                                                             \
2134      gen_##name(ctx, 1);                                                       \
2135  }
2136  #define GEN_PPC64_R4(name, opc1, opc2)                                        \
2137  static void glue(gen_, name##0)(DisasContext *ctx)                            \
2138  {                                                                             \
2139      gen_##name(ctx, 0, 0);                                                    \
2140  }                                                                             \
2141                                                                                \
2142  static void glue(gen_, name##1)(DisasContext *ctx)                            \
2143  {                                                                             \
2144      gen_##name(ctx, 0, 1);                                                    \
2145  }                                                                             \
2146                                                                                \
2147  static void glue(gen_, name##2)(DisasContext *ctx)                            \
2148  {                                                                             \
2149      gen_##name(ctx, 1, 0);                                                    \
2150  }                                                                             \
2151                                                                                \
2152  static void glue(gen_, name##3)(DisasContext *ctx)                            \
2153  {                                                                             \
2154      gen_##name(ctx, 1, 1);                                                    \
2155  }
2156  
2157  static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
2158  {
2159      TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2160      TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2161      int len = me - mb + 1;
2162      int rsh = (64 - sh) & 63;
2163  
2164      if (sh != 0 && len > 0 && me == (63 - sh)) {
2165          tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
2166      } else if (me == 63 && rsh + len <= 64) {
2167          tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
2168      } else {
2169          tcg_gen_rotli_tl(t_ra, t_rs, sh);
2170          tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2171      }
2172      if (unlikely(Rc(ctx->opcode) != 0)) {
2173          gen_set_Rc0(ctx, t_ra);
2174      }
2175  }
2176  
2177  /* rldicl - rldicl. */
2178  static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
2179  {
2180      uint32_t sh, mb;
2181  
2182      sh = SH(ctx->opcode) | (shn << 5);
2183      mb = MB(ctx->opcode) | (mbn << 5);
2184      gen_rldinm(ctx, mb, 63, sh);
2185  }
2186  GEN_PPC64_R4(rldicl, 0x1E, 0x00);
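/*
 * GEN_PPC64_R2/R4 generate thin wrappers that supply the extra high bit(s)
 * of the split SH/MB/ME fields as constants.  For example, the line above
 * produces gen_rldicl0..gen_rldicl3, each calling gen_rldicl(ctx, mbn, shn)
 * with one of the four (mbn, shn) combinations; the opcode arguments are
 * only meaningful to the opcode-table entries, not to the expansion itself.
 */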
2187  
2188  /* rldicr - rldicr. */
2189  static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
2190  {
2191      uint32_t sh, me;
2192  
2193      sh = SH(ctx->opcode) | (shn << 5);
2194      me = MB(ctx->opcode) | (men << 5);
2195      gen_rldinm(ctx, 0, me, sh);
2196  }
2197  GEN_PPC64_R4(rldicr, 0x1E, 0x02);
2198  
2199  /* rldic - rldic. */
2200  static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
2201  {
2202      uint32_t sh, mb;
2203  
2204      sh = SH(ctx->opcode) | (shn << 5);
2205      mb = MB(ctx->opcode) | (mbn << 5);
2206      gen_rldinm(ctx, mb, 63 - sh, sh);
2207  }
2208  GEN_PPC64_R4(rldic, 0x1E, 0x04);
2209  
2210  static void gen_rldnm(DisasContext *ctx, int mb, int me)
2211  {
2212      TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2213      TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2214      TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
2215      TCGv t0;
2216  
2217      t0 = tcg_temp_new();
2218      tcg_gen_andi_tl(t0, t_rb, 0x3f);
2219      tcg_gen_rotl_tl(t_ra, t_rs, t0);
2220  
2221      tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
2222      if (unlikely(Rc(ctx->opcode) != 0)) {
2223          gen_set_Rc0(ctx, t_ra);
2224      }
2225  }
2226  
2227  /* rldcl - rldcl. */
2228  static inline void gen_rldcl(DisasContext *ctx, int mbn)
2229  {
2230      uint32_t mb;
2231  
2232      mb = MB(ctx->opcode) | (mbn << 5);
2233      gen_rldnm(ctx, mb, 63);
2234  }
2235  GEN_PPC64_R2(rldcl, 0x1E, 0x08);
2236  
2237  /* rldcr - rldcr. */
2238  static inline void gen_rldcr(DisasContext *ctx, int men)
2239  {
2240      uint32_t me;
2241  
2242      me = MB(ctx->opcode) | (men << 5);
2243      gen_rldnm(ctx, 0, me);
2244  }
2245  GEN_PPC64_R2(rldcr, 0x1E, 0x09);
2246  
2247  /* rldimi - rldimi. */
2248  static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
2249  {
2250      TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
2251      TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
2252      uint32_t sh = SH(ctx->opcode) | (shn << 5);
2253      uint32_t mb = MB(ctx->opcode) | (mbn << 5);
2254      uint32_t me = 63 - sh;
2255  
2256      if (mb <= me) {
2257          tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
2258      } else {
2259          target_ulong mask = MASK(mb, me);
2260          TCGv t1 = tcg_temp_new();
2261  
2262          tcg_gen_rotli_tl(t1, t_rs, sh);
2263          tcg_gen_andi_tl(t1, t1, mask);
2264          tcg_gen_andi_tl(t_ra, t_ra, ~mask);
2265          tcg_gen_or_tl(t_ra, t_ra, t1);
2266      }
2267      if (unlikely(Rc(ctx->opcode) != 0)) {
2268          gen_set_Rc0(ctx, t_ra);
2269      }
2270  }
2271  GEN_PPC64_R4(rldimi, 0x1E, 0x06);
2272  #endif
2273  
2274  /***                             Integer shift                             ***/
2275  
2276  /* slw & slw. */
2277  static void gen_slw(DisasContext *ctx)
2278  {
2279      TCGv t0, t1;
2280  
2281      t0 = tcg_temp_new();
2282      /* AND rS with a mask that is 0 when rB >= 0x20 */
2283  #if defined(TARGET_PPC64)
2284      tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2285      tcg_gen_sari_tl(t0, t0, 0x3f);
2286  #else
2287      tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2288      tcg_gen_sari_tl(t0, t0, 0x1f);
2289  #endif
2290      tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2291      t1 = tcg_temp_new();
2292      tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2293      tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2294      tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
2295      if (unlikely(Rc(ctx->opcode) != 0)) {
2296          gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2297      }
2298  }
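/*
 * The shift/arithmetic-shift pair above broadcasts bit 5 of rB (the
 * "count >= 32" bit) across the register, and the andc then clears rS
 * whenever that bit is set, so slw yields 0 for shift amounts 32..63
 * without a branch.  srw, sld and srd below use the same trick with the
 * appropriate bit.
 */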
2299  
2300  /* sraw & sraw. */
2301  static void gen_sraw(DisasContext *ctx)
2302  {
2303      gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], tcg_env,
2304                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2305      if (unlikely(Rc(ctx->opcode) != 0)) {
2306          gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2307      }
2308  }
2309  
2310  /* srawi & srawi. */
2311  static void gen_srawi(DisasContext *ctx)
2312  {
2313      int sh = SH(ctx->opcode);
2314      TCGv dst = cpu_gpr[rA(ctx->opcode)];
2315      TCGv src = cpu_gpr[rS(ctx->opcode)];
2316      if (sh == 0) {
2317          tcg_gen_ext32s_tl(dst, src);
2318          tcg_gen_movi_tl(cpu_ca, 0);
2319          if (is_isa300(ctx)) {
2320              tcg_gen_movi_tl(cpu_ca32, 0);
2321          }
2322      } else {
2323          TCGv t0;
2324          tcg_gen_ext32s_tl(dst, src);
2325          tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
2326          t0 = tcg_temp_new();
2327          tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
2328          tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2329          tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2330          if (is_isa300(ctx)) {
2331              tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2332          }
2333          tcg_gen_sari_tl(dst, dst, sh);
2334      }
2335      if (unlikely(Rc(ctx->opcode) != 0)) {
2336          gen_set_Rc0(ctx, dst);
2337      }
2338  }
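/*
 * For srawi, CA must be set only when a negative value loses 1-bits to the
 * shift: the low sh bits are masked, AND-ed with the sign broadcast, and
 * compared against zero, giving CA = (rS < 0) && (shifted-out bits != 0).
 * sradi below applies the same pattern to the full 64-bit register.
 */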
2339  
2340  /* srw & srw. */
2341  static void gen_srw(DisasContext *ctx)
2342  {
2343      TCGv t0, t1;
2344  
2345      t0 = tcg_temp_new();
2346      /* AND rS with a mask that is 0 when rB >= 0x20 */
2347  #if defined(TARGET_PPC64)
2348      tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
2349      tcg_gen_sari_tl(t0, t0, 0x3f);
2350  #else
2351      tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
2352      tcg_gen_sari_tl(t0, t0, 0x1f);
2353  #endif
2354      tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2355      tcg_gen_ext32u_tl(t0, t0);
2356      t1 = tcg_temp_new();
2357      tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
2358      tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2359      if (unlikely(Rc(ctx->opcode) != 0)) {
2360          gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2361      }
2362  }
2363  
2364  #if defined(TARGET_PPC64)
2365  /* sld & sld. */
2366  static void gen_sld(DisasContext *ctx)
2367  {
2368      TCGv t0, t1;
2369  
2370      t0 = tcg_temp_new();
2371      /* AND rS with a mask that is 0 when rB >= 0x40 */
2372      tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2373      tcg_gen_sari_tl(t0, t0, 0x3f);
2374      tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2375      t1 = tcg_temp_new();
2376      tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2377      tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2378      if (unlikely(Rc(ctx->opcode) != 0)) {
2379          gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2380      }
2381  }
2382  
2383  /* srad & srad. */
2384  static void gen_srad(DisasContext *ctx)
2385  {
2386      gen_helper_srad(cpu_gpr[rA(ctx->opcode)], tcg_env,
2387                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2388      if (unlikely(Rc(ctx->opcode) != 0)) {
2389          gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2390      }
2391  }
2392  /* sradi & sradi. */
2393  static inline void gen_sradi(DisasContext *ctx, int n)
2394  {
2395      int sh = SH(ctx->opcode) + (n << 5);
2396      TCGv dst = cpu_gpr[rA(ctx->opcode)];
2397      TCGv src = cpu_gpr[rS(ctx->opcode)];
2398      if (sh == 0) {
2399          tcg_gen_mov_tl(dst, src);
2400          tcg_gen_movi_tl(cpu_ca, 0);
2401          if (is_isa300(ctx)) {
2402              tcg_gen_movi_tl(cpu_ca32, 0);
2403          }
2404      } else {
2405          TCGv t0;
2406          tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
2407          t0 = tcg_temp_new();
2408          tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
2409          tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
2410          tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
2411          if (is_isa300(ctx)) {
2412              tcg_gen_mov_tl(cpu_ca32, cpu_ca);
2413          }
2414          tcg_gen_sari_tl(dst, src, sh);
2415      }
2416      if (unlikely(Rc(ctx->opcode) != 0)) {
2417          gen_set_Rc0(ctx, dst);
2418      }
2419  }
2420  
2421  static void gen_sradi0(DisasContext *ctx)
2422  {
2423      gen_sradi(ctx, 0);
2424  }
2425  
2426  static void gen_sradi1(DisasContext *ctx)
2427  {
2428      gen_sradi(ctx, 1);
2429  }
2430  
2431  /* extswsli & extswsli. */
2432  static inline void gen_extswsli(DisasContext *ctx, int n)
2433  {
2434      int sh = SH(ctx->opcode) + (n << 5);
2435      TCGv dst = cpu_gpr[rA(ctx->opcode)];
2436      TCGv src = cpu_gpr[rS(ctx->opcode)];
2437  
2438      tcg_gen_ext32s_tl(dst, src);
2439      tcg_gen_shli_tl(dst, dst, sh);
2440      if (unlikely(Rc(ctx->opcode) != 0)) {
2441          gen_set_Rc0(ctx, dst);
2442      }
2443  }
2444  
2445  static void gen_extswsli0(DisasContext *ctx)
2446  {
2447      gen_extswsli(ctx, 0);
2448  }
2449  
2450  static void gen_extswsli1(DisasContext *ctx)
2451  {
2452      gen_extswsli(ctx, 1);
2453  }
2454  
2455  /* srd & srd. */
2456  static void gen_srd(DisasContext *ctx)
2457  {
2458      TCGv t0, t1;
2459  
2460      t0 = tcg_temp_new();
2461      /* AND rS with a mask that is 0 when rB >= 0x40 */
2462      tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
2463      tcg_gen_sari_tl(t0, t0, 0x3f);
2464      tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
2465      t1 = tcg_temp_new();
2466      tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
2467      tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
2468      if (unlikely(Rc(ctx->opcode) != 0)) {
2469          gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
2470      }
2471  }
2472  #endif
2473  
2474  /***                           Addressing modes                            ***/
2475  /* Register indirect with immediate index : EA = (rA|0) + SIMM */
2476  static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
2477                                        target_long maskl)
2478  {
2479      target_long simm = SIMM(ctx->opcode);
2480  
2481      simm &= ~maskl;
2482      if (rA(ctx->opcode) == 0) {
2483          if (NARROW_MODE(ctx)) {
2484              simm = (uint32_t)simm;
2485          }
2486          tcg_gen_movi_tl(EA, simm);
2487      } else if (likely(simm != 0)) {
2488          tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
2489          if (NARROW_MODE(ctx)) {
2490              tcg_gen_ext32u_tl(EA, EA);
2491          }
2492      } else {
2493          if (NARROW_MODE(ctx)) {
2494              tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2495          } else {
2496              tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2497          }
2498      }
2499  }
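/*
 * The ISA's (rA|0) convention means that register 0 used as a base denotes
 * the literal value 0, not the contents of GPR0, which is why every address
 * helper here special-cases rA == 0.  In narrow (32-bit) mode the computed
 * effective address is additionally truncated to 32 bits.
 */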
2500  
2501  static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
2502  {
2503      if (rA(ctx->opcode) == 0) {
2504          if (NARROW_MODE(ctx)) {
2505              tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2506          } else {
2507              tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
2508          }
2509      } else {
2510          tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
2511          if (NARROW_MODE(ctx)) {
2512              tcg_gen_ext32u_tl(EA, EA);
2513          }
2514      }
2515  }
2516  
2517  static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
2518  {
2519      if (rA(ctx->opcode) == 0) {
2520          tcg_gen_movi_tl(EA, 0);
2521      } else if (NARROW_MODE(ctx)) {
2522          tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2523      } else {
2524          tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
2525      }
2526  }
2527  
2528  static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
2529                                  target_long val)
2530  {
2531      tcg_gen_addi_tl(ret, arg1, val);
2532      if (NARROW_MODE(ctx)) {
2533          tcg_gen_ext32u_tl(ret, ret);
2534      }
2535  }
2536  
2537  static inline void gen_align_no_le(DisasContext *ctx)
2538  {
2539      gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
2540                        (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
2541  }
2542  
2543  /* EA <- {(ra == 0) ? 0 : GPR[ra]} + displ */
2544  static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
2545  {
2546      TCGv ea = tcg_temp_new();
2547      if (ra) {
2548          tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
2549      } else {
2550          tcg_gen_mov_tl(ea, displ);
2551      }
2552      if (NARROW_MODE(ctx)) {
2553          tcg_gen_ext32u_tl(ea, ea);
2554      }
2555      return ea;
2556  }
2557  
2558  #if defined(TARGET_PPC64)
2559  /* EA <- (ra == 0) ? 0 : GPR[ra] */
2560  static TCGv do_ea_calc_ra(DisasContext *ctx, int ra)
2561  {
2562      TCGv EA = tcg_temp_new();
2563      if (!ra) {
2564          tcg_gen_movi_tl(EA, 0);
2565      } else if (NARROW_MODE(ctx)) {
2566          tcg_gen_ext32u_tl(EA, cpu_gpr[ra]);
2567      } else {
2568          tcg_gen_mov_tl(EA, cpu_gpr[ra]);
2569      }
2570      return EA;
2571  }
2572  #endif
2573  
2574  /***                             Integer load                              ***/
2575  #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
2576  #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))
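/*
 * DEF_MEMOP applies the guest's current byte order (default_tcg_memop_mask
 * is set from the translation flags when the TB starts); BSWAP_MEMOP flips
 * it by XOR-ing in MO_BSWAP, which is what the byte-reversed load/store
 * instructions (lhbrx and friends) need.
 */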
2577  
2578  #define GEN_QEMU_LOAD_TL(ldop, op)                                      \
2579  static void glue(gen_qemu_, ldop)(DisasContext *ctx,                    \
2580                                    TCGv val,                             \
2581                                    TCGv addr)                            \
2582  {                                                                       \
2583      tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                    \
2584  }
2585  
2586  GEN_QEMU_LOAD_TL(ld8u,  DEF_MEMOP(MO_UB))
2587  GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
2588  GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
2589  GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
2590  GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))
2591  
2592  GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
2593  GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))
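/*
 * Each instantiation above expands to a small wrapper; for example
 * GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) defines
 * gen_qemu_ld8u(ctx, val, addr), which simply issues tcg_gen_qemu_ld_tl
 * with the zero-extending byte memop for the current mem_idx.
 */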
2594  
2595  #define GEN_QEMU_LOAD_64(ldop, op)                                  \
2596  static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,    \
2597                                               TCGv_i64 val,          \
2598                                               TCGv addr)             \
2599  {                                                                   \
2600      tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);               \
2601  }
2602  
2603  GEN_QEMU_LOAD_64(ld8u,  DEF_MEMOP(MO_UB))
2604  GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
2605  GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
2606  GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
2607  GEN_QEMU_LOAD_64(ld64,  DEF_MEMOP(MO_UQ))
2608  
2609  #if defined(TARGET_PPC64)
2610  GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
2611  #endif
2612  
2613  #define GEN_QEMU_STORE_TL(stop, op)                                     \
2614  static void glue(gen_qemu_, stop)(DisasContext *ctx,                    \
2615                                    TCGv val,                             \
2616                                    TCGv addr)                            \
2617  {                                                                       \
2618      tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                    \
2619  }
2620  
2621  #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
2622  GEN_QEMU_STORE_TL(st8,  DEF_MEMOP(MO_UB))
2623  #endif
2624  GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
2625  GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))
2626  
2627  GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
2628  GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))
2629  
2630  #define GEN_QEMU_STORE_64(stop, op)                               \
2631  static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,  \
2632                                                TCGv_i64 val,       \
2633                                                TCGv addr)          \
2634  {                                                                 \
2635      tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);             \
2636  }
2637  
2638  GEN_QEMU_STORE_64(st8,  DEF_MEMOP(MO_UB))
2639  GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
2640  GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
2641  GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))
2642  
2643  #if defined(TARGET_PPC64)
2644  GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
2645  #endif
2646  
2647  #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
2648  static void glue(gen_, name##x)(DisasContext *ctx)                            \
2649  {                                                                             \
2650      TCGv EA;                                                                  \
2651      chk(ctx);                                                                 \
2652      gen_set_access_type(ctx, ACCESS_INT);                                     \
2653      EA = tcg_temp_new();                                                      \
2654      gen_addr_reg_index(ctx, EA);                                              \
2655      gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                       \
2656  }
2657  
2658  #define GEN_LDX(name, ldop, opc2, opc3, type)                                 \
2659      GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2660  
2661  #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                            \
2662      GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2663  
2664  #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
2665  static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2666  {                                                                             \
2667      TCGv EA;                                                                  \
2668      CHK_SV(ctx);                                                              \
2669      gen_set_access_type(ctx, ACCESS_INT);                                     \
2670      EA = tcg_temp_new();                                                      \
2671      gen_addr_reg_index(ctx, EA);                                              \
2672      tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
2673  }
2674  
2675  GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
2676  GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
2677  GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
2678  #if defined(TARGET_PPC64)
2679  GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
2680  #endif
2681  
2682  #if defined(TARGET_PPC64)
2683  /* CI load/store variants */
2684  GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
2685  GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
2686  GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
2687  GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
2688  #endif
2689  
2690  /***                              Integer store                            ***/
2691  #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
2692  static void glue(gen_, name##x)(DisasContext *ctx)                            \
2693  {                                                                             \
2694      TCGv EA;                                                                  \
2695      chk(ctx);                                                                 \
2696      gen_set_access_type(ctx, ACCESS_INT);                                     \
2697      EA = tcg_temp_new();                                                      \
2698      gen_addr_reg_index(ctx, EA);                                              \
2699      gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                       \
2700  }
2701  #define GEN_STX(name, stop, opc2, opc3, type)                                 \
2702      GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)
2703  
2704  #define GEN_STX_HVRM(name, stop, opc2, opc3, type)                            \
2705      GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)
2706  
2707  #define GEN_STEPX(name, stop, opc2, opc3)                                     \
2708  static void glue(gen_, name##epx)(DisasContext *ctx)                          \
2709  {                                                                             \
2710      TCGv EA;                                                                  \
2711      CHK_SV(ctx);                                                              \
2712      gen_set_access_type(ctx, ACCESS_INT);                                     \
2713      EA = tcg_temp_new();                                                      \
2714      gen_addr_reg_index(ctx, EA);                                              \
2715      tcg_gen_qemu_st_tl(                                                       \
2716          cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);              \
2717  }
2718  
2719  GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
2720  GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
2721  GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
2722  #if defined(TARGET_PPC64)
2723  GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
2724  #endif
2725  
2726  #if defined(TARGET_PPC64)
2727  GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
2728  GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
2729  GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
2730  GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
2731  #endif
2732  /***                Integer load and store with byte reverse               ***/
2733  
2734  /* lhbrx */
2735  GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);
2736  
2737  /* lwbrx */
2738  GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);
2739  
2740  #if defined(TARGET_PPC64)
2741  /* ldbrx */
2742  GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
2743  /* stdbrx */
2744  GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
2745  #endif  /* TARGET_PPC64 */
2746  
2747  /* sthbrx */
2748  GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
2749  /* stwbrx */
2750  GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);
2751  
2752  /***                    Integer load and store multiple                    ***/
2753  
2754  /* lmw */
2755  static void gen_lmw(DisasContext *ctx)
2756  {
2757      TCGv t0;
2758      TCGv_i32 t1;
2759  
2760      if (ctx->le_mode) {
2761          gen_align_no_le(ctx);
2762          return;
2763      }
2764      gen_set_access_type(ctx, ACCESS_INT);
2765      t0 = tcg_temp_new();
2766      t1 = tcg_constant_i32(rD(ctx->opcode));
2767      gen_addr_imm_index(ctx, t0, 0);
2768      gen_helper_lmw(tcg_env, t0, t1);
2769  }
2770  
2771  /* stmw */
2772  static void gen_stmw(DisasContext *ctx)
2773  {
2774      TCGv t0;
2775      TCGv_i32 t1;
2776  
2777      if (ctx->le_mode) {
2778          gen_align_no_le(ctx);
2779          return;
2780      }
2781      gen_set_access_type(ctx, ACCESS_INT);
2782      t0 = tcg_temp_new();
2783      t1 = tcg_constant_i32(rS(ctx->opcode));
2784      gen_addr_imm_index(ctx, t0, 0);
2785      gen_helper_stmw(tcg_env, t0, t1);
2786  }
2787  
2788  /***                    Integer load and store strings                     ***/
2789  
2790  /* lswi */
2791  /*
2792   * The PowerPC32 specification says we must generate an exception if rA is
2793   * in the range of registers to be loaded.  On the other hand, IBM says
2794   * this is valid, but rA won't be loaded.  For now, I'll follow the
2795   * spec...
2796   */
2797  static void gen_lswi(DisasContext *ctx)
2798  {
2799      TCGv t0;
2800      TCGv_i32 t1, t2;
2801      int nb = NB(ctx->opcode);
2802      int start = rD(ctx->opcode);
2803      int ra = rA(ctx->opcode);
2804      int nr;
2805  
2806      if (ctx->le_mode) {
2807          gen_align_no_le(ctx);
2808          return;
2809      }
2810      if (nb == 0) {
2811          nb = 32;
2812      }
2813      nr = DIV_ROUND_UP(nb, 4);
2814      if (unlikely(lsw_reg_in_range(start, nr, ra))) {
2815          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
2816          return;
2817      }
2818      gen_set_access_type(ctx, ACCESS_INT);
2819      t0 = tcg_temp_new();
2820      gen_addr_register(ctx, t0);
2821      t1 = tcg_constant_i32(nb);
2822      t2 = tcg_constant_i32(start);
2823      gen_helper_lsw(tcg_env, t0, t1, t2);
2824  }
2825  
2826  /* lswx */
2827  static void gen_lswx(DisasContext *ctx)
2828  {
2829      TCGv t0;
2830      TCGv_i32 t1, t2, t3;
2831  
2832      if (ctx->le_mode) {
2833          gen_align_no_le(ctx);
2834          return;
2835      }
2836      gen_set_access_type(ctx, ACCESS_INT);
2837      t0 = tcg_temp_new();
2838      gen_addr_reg_index(ctx, t0);
2839      t1 = tcg_constant_i32(rD(ctx->opcode));
2840      t2 = tcg_constant_i32(rA(ctx->opcode));
2841      t3 = tcg_constant_i32(rB(ctx->opcode));
2842      gen_helper_lswx(tcg_env, t0, t1, t2, t3);
2843  }
2844  
2845  /* stswi */
2846  static void gen_stswi(DisasContext *ctx)
2847  {
2848      TCGv t0;
2849      TCGv_i32 t1, t2;
2850      int nb = NB(ctx->opcode);
2851  
2852      if (ctx->le_mode) {
2853          gen_align_no_le(ctx);
2854          return;
2855      }
2856      gen_set_access_type(ctx, ACCESS_INT);
2857      t0 = tcg_temp_new();
2858      gen_addr_register(ctx, t0);
2859      if (nb == 0) {
2860          nb = 32;
2861      }
2862      t1 = tcg_constant_i32(nb);
2863      t2 = tcg_constant_i32(rS(ctx->opcode));
2864      gen_helper_stsw(tcg_env, t0, t1, t2);
2865  }
2866  
2867  /* stswx */
2868  static void gen_stswx(DisasContext *ctx)
2869  {
2870      TCGv t0;
2871      TCGv_i32 t1, t2;
2872  
2873      if (ctx->le_mode) {
2874          gen_align_no_le(ctx);
2875          return;
2876      }
2877      gen_set_access_type(ctx, ACCESS_INT);
2878      t0 = tcg_temp_new();
2879      gen_addr_reg_index(ctx, t0);
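    /* The number of bytes to store comes from XER[57:63] (the low 7 bits). */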
2880      t1 = tcg_temp_new_i32();
2881      tcg_gen_trunc_tl_i32(t1, cpu_xer);
2882      tcg_gen_andi_i32(t1, t1, 0x7F);
2883      t2 = tcg_constant_i32(rS(ctx->opcode));
2884      gen_helper_stsw(tcg_env, t0, t1, t2);
2885  }
2886  
2887  #if !defined(CONFIG_USER_ONLY)
2888  static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
2889  {
2890      TCGv_i32 t;
2891      TCGLabel *l;
2892  
2893      if (!ctx->lazy_tlb_flush) {
2894          return;
2895      }
2896      l = gen_new_label();
2897      t = tcg_temp_new_i32();
2898      tcg_gen_ld_i32(t, tcg_env, offsetof(CPUPPCState, tlb_need_flush));
2899      tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
2900      if (global) {
2901          gen_helper_check_tlb_flush_global(tcg_env);
2902      } else {
2903          gen_helper_check_tlb_flush_local(tcg_env);
2904      }
2905      gen_set_label(l);
2906      if (global) {
2907          /*
2908           * Global TLB flush uses async-work which must run before the
2909           * next instruction, so this must be the last in the TB.
2910           */
2911          ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2912      }
2913  }
2914  #else
2915  static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
2916  #endif
2917  
2918  /* isync */
2919  static void gen_isync(DisasContext *ctx)
2920  {
2921      /*
2922       * We need to check for a pending TLB flush. This can only happen in
2923       * kernel mode, however, so check MSR_PR.
2924       */
2925      if (!ctx->pr) {
2926          gen_check_tlb_flush(ctx, false);
2927      }
2928      tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
2929      ctx->base.is_jmp = DISAS_EXIT_UPDATE;
2930  }
2931  
2932  static void gen_load_locked(DisasContext *ctx, MemOp memop)
2933  {
2934      TCGv gpr = cpu_gpr[rD(ctx->opcode)];
2935      TCGv t0 = tcg_temp_new();
2936  
2937      gen_set_access_type(ctx, ACCESS_RES);
2938      gen_addr_reg_index(ctx, t0);
2939      tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, DEF_MEMOP(memop) | MO_ALIGN);
2940      tcg_gen_mov_tl(cpu_reserve, t0);
2941      tcg_gen_movi_tl(cpu_reserve_length, memop_size(memop));
2942      tcg_gen_mov_tl(cpu_reserve_val, gpr);
2943  }
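/*
 * Sketch of the larx/stcx. model used here: the reservation is tracked in
 * cpu_reserve (address), cpu_reserve_length (access size) and
 * cpu_reserve_val (the value that was loaded), so that the matching
 * store-conditional can be implemented as a compare-and-swap against the
 * remembered value in gen_conditional_store() below.
 */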
2944  
2945  #define LARX(name, memop)                  \
2946  static void gen_##name(DisasContext *ctx)  \
2947  {                                          \
2948      gen_load_locked(ctx, memop);           \
2949  }
2950  
2951  /* lbarx, lharx, lwarx */
2952  LARX(lbarx, MO_UB)
2953  LARX(lharx, MO_UW)
2954  LARX(lwarx, MO_UL)
2955  
2956  static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
2957                                        TCGv EA, TCGCond cond, int addend)
2958  {
2959      TCGv t = tcg_temp_new();
2960      TCGv t2 = tcg_temp_new();
2961      TCGv u = tcg_temp_new();
2962  
2963      tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
2964      tcg_gen_addi_tl(t2, EA, memop_size(memop));
2965      tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
2966      tcg_gen_addi_tl(u, t, addend);
2967  
2968      /* E.g. for fetch and increment bounded... */
2969      /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
2970      tcg_gen_movcond_tl(cond, u, t, t2, u, t);
2971      tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);
2972  
2973      /* RT = (t != t2 ? t : u = 1<<(s*8-1)) */
2974      tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t,
2975                         tcg_constant_tl(1 << (memop_size(memop) * 8 - 1)));
2976  }
2977  
2978  static void gen_ld_atomic(DisasContext *ctx, MemOp memop)
2979  {
2980      uint32_t gpr_FC = FC(ctx->opcode);
2981      TCGv EA = tcg_temp_new();
2982      int rt = rD(ctx->opcode);
2983      bool need_serial;
2984      TCGv src, dst;
2985  
2986      gen_addr_register(ctx, EA);
2987      dst = cpu_gpr[rt];
2988      src = cpu_gpr[(rt + 1) & 31];
2989  
2990      need_serial = false;
2991      memop |= MO_ALIGN;
2992      switch (gpr_FC) {
2993      case 0: /* Fetch and add */
2994          tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop);
2995          break;
2996      case 1: /* Fetch and xor */
2997          tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop);
2998          break;
2999      case 2: /* Fetch and or */
3000          tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop);
3001          break;
3002      case 3: /* Fetch and 'and' */
3003          tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop);
3004          break;
3005      case 4:  /* Fetch and max unsigned */
3006          tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop);
3007          break;
3008      case 5:  /* Fetch and max signed */
3009          tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop);
3010          break;
3011      case 6:  /* Fetch and min unsigned */
3012          tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop);
3013          break;
3014      case 7:  /* Fetch and min signed */
3015          tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop);
3016          break;
3017      case 8: /* Swap */
3018          tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop);
3019          break;
3020  
3021      case 16: /* Compare and swap not equal */
3022          if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3023              need_serial = true;
3024          } else {
3025              TCGv t0 = tcg_temp_new();
3026              TCGv t1 = tcg_temp_new();
3027  
3028              tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop);
3029              if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) {
3030                  tcg_gen_mov_tl(t1, src);
3031              } else {
3032                  tcg_gen_ext32u_tl(t1, src);
3033              }
3034              tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
3035                                 cpu_gpr[(rt + 2) & 31], t0);
3036              tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
3037              tcg_gen_mov_tl(dst, t0);
3038          }
3039          break;
3040  
3041      case 24: /* Fetch and increment bounded */
3042          if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3043              need_serial = true;
3044          } else {
3045              gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
3046          }
3047          break;
3048      case 25: /* Fetch and increment equal */
3049          if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3050              need_serial = true;
3051          } else {
3052              gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
3053          }
3054          break;
3055      case 28: /* Fetch and decrement bounded */
3056          if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3057              need_serial = true;
3058          } else {
3059              gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
3060          }
3061          break;
3062  
3063      default:
3064          /* invoke data storage error handler */
3065          gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3066      }
3067  
3068      if (need_serial) {
3069          /* Restart with exclusive lock.  */
3070          gen_helper_exit_atomic(tcg_env);
3071          ctx->base.is_jmp = DISAS_NORETURN;
3072      }
3073  }
3074  
3075  static void gen_lwat(DisasContext *ctx)
3076  {
3077      gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
3078  }
3079  
3080  #ifdef TARGET_PPC64
3081  static void gen_ldat(DisasContext *ctx)
3082  {
3083      gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
3084  }
3085  #endif
3086  
3087  static void gen_st_atomic(DisasContext *ctx, MemOp memop)
3088  {
3089      uint32_t gpr_FC = FC(ctx->opcode);
3090      TCGv EA = tcg_temp_new();
3091      TCGv src, discard;
3092  
3093      gen_addr_register(ctx, EA);
3094      src = cpu_gpr[rD(ctx->opcode)];
3095      discard = tcg_temp_new();
3096  
3097      memop |= MO_ALIGN;
3098      switch (gpr_FC) {
3099      case 0: /* add and Store */
3100          tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3101          break;
3102      case 1: /* xor and Store */
3103          tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3104          break;
3105      case 2: /* Or and Store */
3106          tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3107          break;
3108      case 3: /* 'and' and Store */
3109          tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3110          break;
3111      case 4:  /* Store max unsigned */
3112          tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3113          break;
3114      case 5:  /* Store max signed */
3115          tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3116          break;
3117      case 6:  /* Store min unsigned */
3118          tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3119          break;
3120      case 7:  /* Store min signed */
3121          tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
3122          break;
3123      case 24: /* Store twin  */
3124          if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
3125              /* Restart with exclusive lock.  */
3126              gen_helper_exit_atomic(tcg_env);
3127              ctx->base.is_jmp = DISAS_NORETURN;
3128          } else {
3129              TCGv t = tcg_temp_new();
3130              TCGv t2 = tcg_temp_new();
3131              TCGv s = tcg_temp_new();
3132              TCGv s2 = tcg_temp_new();
3133              TCGv ea_plus_s = tcg_temp_new();
3134  
3135              tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
3136              tcg_gen_addi_tl(ea_plus_s, EA, memop_size(memop));
3137              tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
3138              tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
3139              tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
3140              tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
3141              tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);
3142          }
3143          break;
3144      default:
3145          /* invoke data storage error handler */
3146          gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
3147      }
3148  }
3149  
3150  static void gen_stwat(DisasContext *ctx)
3151  {
3152      gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
3153  }
3154  
3155  #ifdef TARGET_PPC64
3156  static void gen_stdat(DisasContext *ctx)
3157  {
3158      gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
3159  }
3160  #endif
3161  
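/*
 * stcx. is modelled as an atomic cmpxchg against the value remembered at
 * larx time, after checking that the effective address and access size
 * match the current reservation. This is only an approximation of the
 * architecture (real hardware tracks a reservation granule rather than
 * the data value), but it is sufficient for well-formed lock sequences.
 * CR0 is set to 0b00 || EQ || SO and the reservation is always cleared.
 */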
3162  static void gen_conditional_store(DisasContext *ctx, MemOp memop)
3163  {
3164      TCGLabel *lfail;
3165      TCGv EA;
3166      TCGv cr0;
3167      TCGv t0;
3168      int rs = rS(ctx->opcode);
3169  
3170      lfail = gen_new_label();
3171      EA = tcg_temp_new();
3172      cr0 = tcg_temp_new();
3173      t0 = tcg_temp_new();
3174  
3175      tcg_gen_mov_tl(cr0, cpu_so);
3176      gen_set_access_type(ctx, ACCESS_RES);
3177      gen_addr_reg_index(ctx, EA);
3178      tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3179      tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, memop_size(memop), lfail);
3180  
3181      tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
3182                                cpu_gpr[rs], ctx->mem_idx,
3183                                DEF_MEMOP(memop) | MO_ALIGN);
3184      tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
3185      tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3186      tcg_gen_or_tl(cr0, cr0, t0);
3187  
3188      gen_set_label(lfail);
3189      tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3190      tcg_gen_movi_tl(cpu_reserve, -1);
3191  }
3192  
3193  #define STCX(name, memop)                  \
3194  static void gen_##name(DisasContext *ctx)  \
3195  {                                          \
3196      gen_conditional_store(ctx, memop);     \
3197  }
3198  
3199  STCX(stbcx_, MO_UB)
3200  STCX(sthcx_, MO_UW)
3201  STCX(stwcx_, MO_UL)
3202  
3203  #if defined(TARGET_PPC64)
3204  /* ldarx */
3205  LARX(ldarx, MO_UQ)
3206  /* stdcx. */
3207  STCX(stdcx_, MO_UQ)
3208  
3209  /* lqarx */
3210  static void gen_lqarx(DisasContext *ctx)
3211  {
3212      int rd = rD(ctx->opcode);
3213      TCGv EA, hi, lo;
3214      TCGv_i128 t16;
3215  
3216      if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
3217                   (rd == rB(ctx->opcode)))) {
3218          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3219          return;
3220      }
3221  
3222      gen_set_access_type(ctx, ACCESS_RES);
3223      EA = tcg_temp_new();
3224      gen_addr_reg_index(ctx, EA);
3225  
3226      /* Note that the low part is always in RD+1, even in LE mode.  */
3227      lo = cpu_gpr[rd + 1];
3228      hi = cpu_gpr[rd];
3229  
3230      t16 = tcg_temp_new_i128();
3231      tcg_gen_qemu_ld_i128(t16, EA, ctx->mem_idx, DEF_MEMOP(MO_128 | MO_ALIGN));
3232      tcg_gen_extr_i128_i64(lo, hi, t16);
3233  
3234      tcg_gen_mov_tl(cpu_reserve, EA);
3235      tcg_gen_movi_tl(cpu_reserve_length, 16);
3236      tcg_gen_st_tl(hi, tcg_env, offsetof(CPUPPCState, reserve_val));
3237      tcg_gen_st_tl(lo, tcg_env, offsetof(CPUPPCState, reserve_val2));
3238  }
3239  
3240  /* stqcx. */
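/*
 * 128-bit variant of the scheme used in gen_conditional_store(): the
 * cmpxchg result is XORed with the saved reservation pair and CR0.EQ is
 * set only when both halves match.
 */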
3241  static void gen_stqcx_(DisasContext *ctx)
3242  {
3243      TCGLabel *lfail;
3244      TCGv EA, t0, t1;
3245      TCGv cr0;
3246      TCGv_i128 cmp, val;
3247      int rs = rS(ctx->opcode);
3248  
3249      if (unlikely(rs & 1)) {
3250          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3251          return;
3252      }
3253  
3254      lfail = gen_new_label();
3255      EA = tcg_temp_new();
3256      cr0 = tcg_temp_new();
3257  
3258      tcg_gen_mov_tl(cr0, cpu_so);
3259      gen_set_access_type(ctx, ACCESS_RES);
3260      gen_addr_reg_index(ctx, EA);
3261      tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lfail);
3262      tcg_gen_brcondi_tl(TCG_COND_NE, cpu_reserve_length, 16, lfail);
3263  
3264      cmp = tcg_temp_new_i128();
3265      val = tcg_temp_new_i128();
3266  
3267      tcg_gen_concat_i64_i128(cmp, cpu_reserve_val2, cpu_reserve_val);
3268  
3269      /* Note that the low part is always in RS+1, even in LE mode.  */
3270      tcg_gen_concat_i64_i128(val, cpu_gpr[rs + 1], cpu_gpr[rs]);
3271  
3272      tcg_gen_atomic_cmpxchg_i128(val, cpu_reserve, cmp, val, ctx->mem_idx,
3273                                  DEF_MEMOP(MO_128 | MO_ALIGN));
3274  
3275      t0 = tcg_temp_new();
3276      t1 = tcg_temp_new();
3277      tcg_gen_extr_i128_i64(t1, t0, val);
3278  
3279      tcg_gen_xor_tl(t1, t1, cpu_reserve_val2);
3280      tcg_gen_xor_tl(t0, t0, cpu_reserve_val);
3281      tcg_gen_or_tl(t0, t0, t1);
3282  
3283      tcg_gen_setcondi_tl(TCG_COND_EQ, t0, t0, 0);
3284      tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
3285      tcg_gen_or_tl(cr0, cr0, t0);
3286  
3287      gen_set_label(lfail);
3288      tcg_gen_trunc_tl_i32(cpu_crf[0], cr0);
3289      tcg_gen_movi_tl(cpu_reserve, -1);
3290  }
3291  #endif /* defined(TARGET_PPC64) */
3292  
3293  /* wait */
3294  static void gen_wait(DisasContext *ctx)
3295  {
3296      uint32_t wc;
3297  
3298      if (ctx->insns_flags & PPC_WAIT) {
3299          /* v2.03-v2.07 define an older incompatible 'wait' encoding. */
3300  
3301          if (ctx->insns_flags2 & PPC2_PM_ISA206) {
3302              /* v2.06 introduced the WC field. WC > 0 may be treated as no-op. */
3303              wc = WC(ctx->opcode);
3304          } else {
3305              wc = 0;
3306          }
3307  
3308      } else if (ctx->insns_flags2 & PPC2_ISA300) {
3309          /* v3.0 defines a new 'wait' encoding. */
3310          wc = WC(ctx->opcode);
3311          if (ctx->insns_flags2 & PPC2_ISA310) {
3312              uint32_t pl = PL(ctx->opcode);
3313  
3314              /* WC 1,2 may be treated as no-op. WC 3 is reserved. */
3315              if (wc == 3) {
3316                  gen_invalid(ctx);
3317                  return;
3318              }
3319  
3320              /* PL 1-3 are reserved. If WC=2 then the insn is treated as a no-op. */
3321              if (pl > 0 && wc != 2) {
3322                  gen_invalid(ctx);
3323                  return;
3324              }
3325  
3326          } else { /* ISA300 */
3327              /* WC 1-3 are reserved */
3328              if (wc > 0) {
3329                  gen_invalid(ctx);
3330                  return;
3331              }
3332          }
3333  
3334      } else {
3335          warn_report("wait instruction decoded with wrong ISA flags.");
3336          gen_invalid(ctx);
3337          return;
3338      }
3339  
3340      /*
3341       * wait without WC field or with WC=0 waits for an exception / interrupt
3342       * to occur.
3343       */
3344      if (wc == 0) {
3345          TCGv_i32 t0 = tcg_constant_i32(1);
3346          tcg_gen_st_i32(t0, tcg_env,
3347                         -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
3348          /* Stop translation, as the CPU is supposed to sleep from now */
3349          gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3350      }
3351  
3352      /*
3353       * Other wait types must not just wait until an exception occurs because
3354       * ignoring their other wake-up conditions could cause a hang.
3355       *
3356       * For v2.06 and 2.07, wc=1,2,3 are architected but may be implemented as
3357       * no-ops.
3358       *
3359       * wc=1 and wc=3 explicitly allow the instruction to be treated as a no-op.
3360       *
3361       * wc=2 waits for an implementation-specific condition, which could be
3362       * always true, so it can be implemented as a no-op.
3363       *
3364       * For v3.1, wc=1,2 are architected but may be implemented as no-ops.
3365       *
3366       * wc=1 (waitrsv) waits for an exception or a reservation to be lost.
3367       * Reservation-loss may have implementation-specific conditions, so it
3368       * can be implemented as a no-op.
3369       *
3370       * wc=2 waits for an exception or an amount of time to pass. This
3371       * amount is implementation-specific so it can be implemented as a
3372       * no-op.
3373       *
3374       * ISA v3.1 allows for execution to resume "in the rare case of
3375       * an implementation-dependent event", so in any case software must
3376       * not depend on the architected resumption condition to become
3377       * true, so no-op implementations should be architecturally correct
3378       * (if suboptimal).
3379       */
3380  }
3381  
3382  #if defined(TARGET_PPC64)
3383  static void gen_doze(DisasContext *ctx)
3384  {
3385  #if defined(CONFIG_USER_ONLY)
3386      GEN_PRIV(ctx);
3387  #else
3388      TCGv_i32 t;
3389  
3390      CHK_HV(ctx);
3391      translator_io_start(&ctx->base);
3392      t = tcg_constant_i32(PPC_PM_DOZE);
3393      gen_helper_pminsn(tcg_env, t);
3394      /* Stop translation, as the CPU is supposed to sleep from now */
3395      gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3396  #endif /* defined(CONFIG_USER_ONLY) */
3397  }
3398  
3399  static void gen_nap(DisasContext *ctx)
3400  {
3401  #if defined(CONFIG_USER_ONLY)
3402      GEN_PRIV(ctx);
3403  #else
3404      TCGv_i32 t;
3405  
3406      CHK_HV(ctx);
3407      translator_io_start(&ctx->base);
3408      t = tcg_constant_i32(PPC_PM_NAP);
3409      gen_helper_pminsn(tcg_env, t);
3410      /* Stop translation, as the CPU is supposed to sleep from now */
3411      gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3412  #endif /* defined(CONFIG_USER_ONLY) */
3413  }
3414  
3415  static void gen_stop(DisasContext *ctx)
3416  {
3417  #if defined(CONFIG_USER_ONLY)
3418      GEN_PRIV(ctx);
3419  #else
3420      TCGv_i32 t;
3421  
3422      CHK_HV(ctx);
3423      translator_io_start(&ctx->base);
3424      t = tcg_constant_i32(PPC_PM_STOP);
3425      gen_helper_pminsn(tcg_env, t);
3426      /* Stop translation, as the CPU is supposed to sleep from now */
3427      gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3428  #endif /* defined(CONFIG_USER_ONLY) */
3429  }
3430  
3431  static void gen_sleep(DisasContext *ctx)
3432  {
3433  #if defined(CONFIG_USER_ONLY)
3434      GEN_PRIV(ctx);
3435  #else
3436      TCGv_i32 t;
3437  
3438      CHK_HV(ctx);
3439      translator_io_start(&ctx->base);
3440      t = tcg_constant_i32(PPC_PM_SLEEP);
3441      gen_helper_pminsn(tcg_env, t);
3442      /* Stop translation, as the CPU is supposed to sleep from now */
3443      gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3444  #endif /* defined(CONFIG_USER_ONLY) */
3445  }
3446  
3447  static void gen_rvwinkle(DisasContext *ctx)
3448  {
3449  #if defined(CONFIG_USER_ONLY)
3450      GEN_PRIV(ctx);
3451  #else
3452      TCGv_i32 t;
3453  
3454      CHK_HV(ctx);
3455      translator_io_start(&ctx->base);
3456      t = tcg_constant_i32(PPC_PM_RVWINKLE);
3457      gen_helper_pminsn(tcg_env, t);
3458      /* Stop translation, as the CPU is supposed to sleep from now */
3459      gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
3460  #endif /* defined(CONFIG_USER_ONLY) */
3461  }
3462  
3463  static inline TCGv gen_write_bhrb(TCGv_ptr base, TCGv offset, TCGv mask, TCGv value)
3464  {
3465      TCGv_ptr tmp = tcg_temp_new_ptr();
3466  
3467      /* add base and offset to get address of bhrb entry */
3468      tcg_gen_add_ptr(tmp, base, (TCGv_ptr)offset);
3469  
3470      /* store value into bhrb at bhrb_offset */
3471      tcg_gen_st_i64(value, tmp, 0);
3472  
3473      /* add 8 to current bhrb_offset */
3474      tcg_gen_addi_tl(offset, offset, 8);
3475  
3476      /* apply offset mask */
3477      tcg_gen_and_tl(offset, offset, mask);
3478  
3479      return offset;
3480  }
3481  #endif /* #if defined(TARGET_PPC64) */
3482  
3483  static inline void gen_update_branch_history(DisasContext *ctx,
3484                                               target_ulong nip,
3485                                               TCGv target,
3486                                               target_long inst_type)
3487  {
3488  #if defined(TARGET_PPC64)
3489      TCGv_ptr base;
3490      TCGv tmp;
3491      TCGv offset;
3492      TCGv mask;
3493      TCGLabel *no_update;
3494  
3495      if (ctx->has_cfar) {
3496          tcg_gen_movi_tl(cpu_cfar, nip);
3497      }
3498  
3499      if (!ctx->has_bhrb ||
3500          !ctx->bhrb_enable ||
3501          inst_type == BHRB_TYPE_NORECORD) {
3502          return;
3503      }
3504  
3505      tmp = tcg_temp_new();
3506      no_update = gen_new_label();
3507  
3508      /* check for bhrb filtering */
3509      tcg_gen_ld_tl(tmp, tcg_env, offsetof(CPUPPCState, bhrb_filter));
3510      tcg_gen_andi_tl(tmp, tmp, inst_type);
3511      tcg_gen_brcondi_tl(TCG_COND_EQ, tmp, 0, no_update);
3512  
3513      base = tcg_temp_new_ptr();
3514      offset = tcg_temp_new();
3515      mask = tcg_temp_new();
3516  
3517      /* load bhrb base address */
3518      tcg_gen_ld_ptr(base, tcg_env, offsetof(CPUPPCState, bhrb_base));
3519  
3520      /* load current bhrb_offset */
3521      tcg_gen_ld_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3522  
3523      /* load a BHRB offset mask */
3524      tcg_gen_ld_tl(mask, tcg_env, offsetof(CPUPPCState, bhrb_offset_mask));
3525  
3526      offset = gen_write_bhrb(base, offset, mask, tcg_constant_i64(nip));
3527  
3528      /* Also record the target address for XL-Form branches */
3529      if (inst_type & BHRB_TYPE_XL_FORM) {
3530  
3531          /* Set the 'T' bit for target entries */
3532          tcg_gen_ori_tl(tmp, target, 0x2);
3533  
3534          offset = gen_write_bhrb(base, offset, mask, tmp);
3535      }
3536  
3537      /* save updated bhrb_offset for next time */
3538      tcg_gen_st_tl(offset, tcg_env, offsetof(CPUPPCState, bhrb_offset));
3539  
3540      gen_set_label(no_update);
3541  #endif
3542  }
3543  
3544  #if defined(TARGET_PPC64)
3545  static void pmu_count_insns(DisasContext *ctx)
3546  {
3547      /*
3548       * Do not bother calling the helper if the PMU isn't counting
3549       * instructions.
3550       */
3551      if (!ctx->pmu_insn_cnt) {
3552          return;
3553      }
3554  
3555   #if !defined(CONFIG_USER_ONLY)
3556      TCGLabel *l;
3557      TCGv t0;
3558  
3559      /*
3560       * The PMU insns_inc() helper stops the internal PMU timer if a
3561       * counter overflows happens. In that case, if the guest is
3562       * running with icount and we do not handle it beforehand,
3563       * the helper can trigger a 'bad icount read'.
3564       */
3565      translator_io_start(&ctx->base);
3566  
3567      /* Avoid helper calls when only PMC5-6 are enabled. */
3568      if (!ctx->pmc_other) {
3569          l = gen_new_label();
3570          t0 = tcg_temp_new();
3571  
3572          gen_load_spr(t0, SPR_POWER_PMC5);
3573          tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3574          gen_store_spr(SPR_POWER_PMC5, t0);
3575          /* Check for overflow, if it's enabled */
3576          if (ctx->mmcr0_pmcjce) {
3577              tcg_gen_brcondi_tl(TCG_COND_LT, t0, PMC_COUNTER_NEGATIVE_VAL, l);
3578              gen_helper_handle_pmc5_overflow(tcg_env);
3579          }
3580  
3581          gen_set_label(l);
3582      } else {
3583          gen_helper_insns_inc(tcg_env, tcg_constant_i32(ctx->base.num_insns));
3584      }
3585    #else
3586      /*
3587       * User mode can read (but not write) PMC5 and start/stop
3588       * the PMU via MMCR0_FC. In this case just increment
3589       * PMC5 with base.num_insns.
3590       */
3591      TCGv t0 = tcg_temp_new();
3592  
3593      gen_load_spr(t0, SPR_POWER_PMC5);
3594      tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
3595      gen_store_spr(SPR_POWER_PMC5, t0);
3596    #endif /* #if !defined(CONFIG_USER_ONLY) */
3597  }
3598  #else
3599  static void pmu_count_insns(DisasContext *ctx)
3600  {
3601      return;
3602  }
3603  #endif /* #if defined(TARGET_PPC64) */
3604  
3605  static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
3606  {
3607      if (unlikely(ctx->singlestep_enabled)) {
3608          return false;
3609      }
3610      return translator_use_goto_tb(&ctx->base, dest);
3611  }
3612  
3613  static void gen_lookup_and_goto_ptr(DisasContext *ctx)
3614  {
3615      if (unlikely(ctx->singlestep_enabled)) {
3616          gen_debug_exception(ctx, false);
3617      } else {
3618          /*
3619           * tcg_gen_lookup_and_goto_ptr will exit the TB if
3620           * CF_NO_GOTO_PTR is set. Count insns now.
3621           */
3622          if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
3623              pmu_count_insns(ctx);
3624          }
3625  
3626          tcg_gen_lookup_and_goto_ptr();
3627      }
3628  }
3629  
3630  /***                                Branch                                 ***/
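/*
 * Direct branches try to chain translation blocks with goto_tb when the
 * destination is acceptable (see use_goto_tb); otherwise NIP is updated
 * and we fall back to looking up the destination TB by address.
 */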
3631  static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
3632  {
3633      if (NARROW_MODE(ctx)) {
3634          dest = (uint32_t) dest;
3635      }
3636      if (use_goto_tb(ctx, dest)) {
3637          pmu_count_insns(ctx);
3638          tcg_gen_goto_tb(n);
3639          tcg_gen_movi_tl(cpu_nip, dest & ~3);
3640          tcg_gen_exit_tb(ctx->base.tb, n);
3641      } else {
3642          tcg_gen_movi_tl(cpu_nip, dest & ~3);
3643          gen_lookup_and_goto_ptr(ctx);
3644      }
3645  }
3646  
3647  static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
3648  {
3649      if (NARROW_MODE(ctx)) {
3650          nip = (uint32_t)nip;
3651      }
3652      tcg_gen_movi_tl(cpu_lr, nip);
3653  }
3654  
3655  /* b ba bl bla */
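/*
 * The LI field is a word-aligned 26-bit signed displacement; the
 * (x ^ 0x02000000) - 0x02000000 expression below sign-extends it
 * (0x02000000 is the sign bit of the field).
 */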
3656  static void gen_b(DisasContext *ctx)
3657  {
3658      target_ulong li, target;
3659  
3660      /* sign extend LI */
3661      li = LI(ctx->opcode);
3662      li = (li ^ 0x02000000) - 0x02000000;
3663      if (likely(AA(ctx->opcode) == 0)) {
3664          target = ctx->cia + li;
3665      } else {
3666          target = li;
3667      }
3668      if (LK(ctx->opcode)) {
3669          gen_setlr(ctx, ctx->base.pc_next);
3670          gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_CALL);
3671      } else {
3672          gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_OTHER);
3673      }
3674      gen_goto_tb(ctx, 0, target);
3675      ctx->base.is_jmp = DISAS_NORETURN;
3676  }
3677  
3678  #define BCOND_IM  0
3679  #define BCOND_LR  1
3680  #define BCOND_CTR 2
3681  #define BCOND_TAR 3
3682  
3683  static void gen_bcond(DisasContext *ctx, int type)
3684  {
3685      uint32_t bo = BO(ctx->opcode);
3686      TCGLabel *l1;
3687      TCGv target;
3688      target_long bhrb_type = BHRB_TYPE_OTHER;
3689  
3690      if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
3691          target = tcg_temp_new();
3692          if (type == BCOND_CTR) {
3693              tcg_gen_mov_tl(target, cpu_ctr);
3694          } else if (type == BCOND_TAR) {
3695              gen_load_spr(target, SPR_TAR);
3696          } else {
3697              tcg_gen_mov_tl(target, cpu_lr);
3698          }
3699          if (!LK(ctx->opcode)) {
3700              bhrb_type |= BHRB_TYPE_INDIRECT;
3701          }
3702          bhrb_type |= BHRB_TYPE_XL_FORM;
3703      } else {
3704          target = NULL;
3705      }
3706      if (LK(ctx->opcode)) {
3707          gen_setlr(ctx, ctx->base.pc_next);
3708          bhrb_type |= BHRB_TYPE_CALL;
3709      }
3710      l1 = gen_new_label();
3711      if ((bo & 0x4) == 0) {
3712          /* Decrement and test CTR */
3713          TCGv temp = tcg_temp_new();
3714  
3715          if (type == BCOND_CTR) {
3716              /*
3717               * All ISAs up to v3 describe this form of bcctr as invalid but
3718               * some processors, i.e. 64-bit server processors compliant
3719               * with arch 2.x, do implement a "test and decrement" logic
3720               * instead, as described in their respective UMs. This logic
3721               * turns CTR into both the branch target and a counter, which
3722               * makes it basically useless and thus never used in real code.
3723               *
3724               * This form was hence chosen to trigger an extra
3725               * micro-architectural side-effect on real HW needed for the
3726               * Spectre v2 workaround. It is up to guests that implement this
3727               * workaround, e.g. Linux, to use this form in a way that just
3728               * triggers the side-effect without doing anything else harmful.
3729               */
3730              if (unlikely(!is_book3s_arch2x(ctx))) {
3731                  gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3732                  return;
3733              }
3734  
3735              if (NARROW_MODE(ctx)) {
3736                  tcg_gen_ext32u_tl(temp, cpu_ctr);
3737              } else {
3738                  tcg_gen_mov_tl(temp, cpu_ctr);
3739              }
3740              if (bo & 0x2) {
3741                  tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3742              } else {
3743                  tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3744              }
3745              tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3746          } else {
3747              tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
3748              if (NARROW_MODE(ctx)) {
3749                  tcg_gen_ext32u_tl(temp, cpu_ctr);
3750              } else {
3751                  tcg_gen_mov_tl(temp, cpu_ctr);
3752              }
3753              if (bo & 0x2) {
3754                  tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
3755              } else {
3756                  tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
3757              }
3758          }
3759          bhrb_type |= BHRB_TYPE_COND;
3760      }
3761      if ((bo & 0x10) == 0) {
3762          /* Test CR */
3763          uint32_t bi = BI(ctx->opcode);
3764          uint32_t mask = 0x08 >> (bi & 0x03);
3765          TCGv_i32 temp = tcg_temp_new_i32();
3766  
3767          if (bo & 0x8) {
3768              tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3769              tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
3770          } else {
3771              tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
3772              tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
3773          }
3774          bhrb_type |= BHRB_TYPE_COND;
3775      }
3776  
3777      gen_update_branch_history(ctx, ctx->cia, target, bhrb_type);
3778  
3779      if (type == BCOND_IM) {
3780          target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
3781          if (likely(AA(ctx->opcode) == 0)) {
3782              gen_goto_tb(ctx, 0, ctx->cia + li);
3783          } else {
3784              gen_goto_tb(ctx, 0, li);
3785          }
3786      } else {
3787          if (NARROW_MODE(ctx)) {
3788              tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
3789          } else {
3790              tcg_gen_andi_tl(cpu_nip, target, ~3);
3791          }
3792          gen_lookup_and_goto_ptr(ctx);
3793      }
3794      if ((bo & 0x14) != 0x14) {
3795          /* fallthrough case */
3796          gen_set_label(l1);
3797          gen_goto_tb(ctx, 1, ctx->base.pc_next);
3798      }
3799      ctx->base.is_jmp = DISAS_NORETURN;
3800  }
3801  
3802  static void gen_bc(DisasContext *ctx)
3803  {
3804      gen_bcond(ctx, BCOND_IM);
3805  }
3806  
3807  static void gen_bcctr(DisasContext *ctx)
3808  {
3809      gen_bcond(ctx, BCOND_CTR);
3810  }
3811  
3812  static void gen_bclr(DisasContext *ctx)
3813  {
3814      gen_bcond(ctx, BCOND_LR);
3815  }
3816  
3817  static void gen_bctar(DisasContext *ctx)
3818  {
3819      gen_bcond(ctx, BCOND_TAR);
3820  }
3821  
3822  /***                      Condition register logical                       ***/
3823  #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
3824  static void glue(gen_, name)(DisasContext *ctx)                               \
3825  {                                                                             \
3826      uint8_t bitmask;                                                          \
3827      int sh;                                                                   \
3828      TCGv_i32 t0, t1;                                                          \
3829      sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
3830      t0 = tcg_temp_new_i32();                                                  \
3831      if (sh > 0)                                                               \
3832          tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
3833      else if (sh < 0)                                                          \
3834          tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
3835      else                                                                      \
3836          tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
3837      t1 = tcg_temp_new_i32();                                                  \
3838      sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
3839      if (sh > 0)                                                               \
3840          tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
3841      else if (sh < 0)                                                          \
3842          tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
3843      else                                                                      \
3844          tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
3845      tcg_op(t0, t0, t1);                                                       \
3846      bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
3847      tcg_gen_andi_i32(t0, t0, bitmask);                                        \
3848      tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
3849      tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
3850  }
3851  
3852  /* crand */
3853  GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
3854  /* crandc */
3855  GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
3856  /* creqv */
3857  GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
3858  /* crnand */
3859  GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
3860  /* crnor */
3861  GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
3862  /* cror */
3863  GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
3864  /* crorc */
3865  GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
3866  /* crxor */
3867  GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);
3868  
3869  /* mcrf */
3870  static void gen_mcrf(DisasContext *ctx)
3871  {
3872      tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
3873  }
3874  
3875  /***                           System linkage                              ***/
3876  
3877  /* rfi (supervisor only) */
3878  static void gen_rfi(DisasContext *ctx)
3879  {
3880  #if defined(CONFIG_USER_ONLY)
3881      GEN_PRIV(ctx);
3882  #else
3883      /*
3884       * This instruction doesn't exist anymore on 64-bit server
3885       * processors compliant with arch 2.x
3886       */
3887      if (is_book3s_arch2x(ctx)) {
3888          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
3889          return;
3890      }
3891      /* Restore CPU state */
3892      CHK_SV(ctx);
3893      translator_io_start(&ctx->base);
3894      gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3895      gen_helper_rfi(tcg_env);
3896      ctx->base.is_jmp = DISAS_EXIT;
3897  #endif
3898  }
3899  
3900  #if defined(TARGET_PPC64)
3901  static void gen_rfid(DisasContext *ctx)
3902  {
3903  #if defined(CONFIG_USER_ONLY)
3904      GEN_PRIV(ctx);
3905  #else
3906      /* Restore CPU state */
3907      CHK_SV(ctx);
3908      translator_io_start(&ctx->base);
3909      gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3910      gen_helper_rfid(tcg_env);
3911      ctx->base.is_jmp = DISAS_EXIT;
3912  #endif
3913  }
3914  
3915  #if !defined(CONFIG_USER_ONLY)
3916  static void gen_rfscv(DisasContext *ctx)
3917  {
3918  #if defined(CONFIG_USER_ONLY)
3919      GEN_PRIV(ctx);
3920  #else
3921      /* Restore CPU state */
3922      CHK_SV(ctx);
3923      translator_io_start(&ctx->base);
3924      gen_update_branch_history(ctx, ctx->cia, NULL, BHRB_TYPE_NORECORD);
3925      gen_helper_rfscv(tcg_env);
3926      ctx->base.is_jmp = DISAS_EXIT;
3927  #endif
3928  }
3929  #endif
3930  
3931  static void gen_hrfid(DisasContext *ctx)
3932  {
3933  #if defined(CONFIG_USER_ONLY)
3934      GEN_PRIV(ctx);
3935  #else
3936      /* Restore CPU state */
3937      CHK_HV(ctx);
3938      translator_io_start(&ctx->base);
3939      gen_helper_hrfid(tcg_env);
3940      ctx->base.is_jmp = DISAS_EXIT;
3941  #endif
3942  }
3943  #endif
3944  
3945  /* sc */
3946  #if defined(CONFIG_USER_ONLY)
3947  #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
3948  #else
3949  #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
3950  #endif
3951  static void gen_sc(DisasContext *ctx)
3952  {
3953      uint32_t lev;
3954  
3955      /*
3956       * LEV is a 7-bit field, but the top 6 bits are treated as a reserved
3957       * field (i.e., ignored). ISA v3.1 changes that to 5 bits, but that is
3958       * for Ultravisor which TCG does not support, so just ignore the top 6.
3959       */
3960      lev = (ctx->opcode >> 5) & 0x1;
3961      gen_exception_err(ctx, POWERPC_SYSCALL, lev);
3962  }
3963  
3964  #if defined(TARGET_PPC64)
3965  #if !defined(CONFIG_USER_ONLY)
3966  static void gen_scv(DisasContext *ctx)
3967  {
3968      uint32_t lev = (ctx->opcode >> 5) & 0x7F;
3969  
3970      /* Set the PC back to the faulting instruction. */
3971      gen_update_nip(ctx, ctx->cia);
3972      gen_helper_scv(tcg_env, tcg_constant_i32(lev));
3973  
3974      ctx->base.is_jmp = DISAS_NORETURN;
3975  }
3976  #endif
3977  #endif
3978  
3979  /***                                Trap                                   ***/
3980  
3981  /* Check for unconditional traps (always or never) */
3982  static bool check_unconditional_trap(DisasContext *ctx, int to)
3983  {
3984      /* Trap never */
3985      if (to == 0) {
3986          return true;
3987      }
3988      /* Trap always */
3989      if (to == 31) {
3990          gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
3991          return true;
3992      }
3993      return false;
3994  }
3995  
3996  /***                          Processor control                            ***/
3997  
3998  /* mcrxr */
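/*
 * mcrxr copies XER[SO, OV, CA] into the three high bits of CR field crfD
 * (low bit zero) and clears those bits in XER.
 */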
3999  static void gen_mcrxr(DisasContext *ctx)
4000  {
4001      TCGv_i32 t0 = tcg_temp_new_i32();
4002      TCGv_i32 t1 = tcg_temp_new_i32();
4003      TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4004  
4005      tcg_gen_trunc_tl_i32(t0, cpu_so);
4006      tcg_gen_trunc_tl_i32(t1, cpu_ov);
4007      tcg_gen_trunc_tl_i32(dst, cpu_ca);
4008      tcg_gen_shli_i32(t0, t0, 3);
4009      tcg_gen_shli_i32(t1, t1, 2);
4010      tcg_gen_shli_i32(dst, dst, 1);
4011      tcg_gen_or_i32(dst, dst, t0);
4012      tcg_gen_or_i32(dst, dst, t1);
4013  
4014      tcg_gen_movi_tl(cpu_so, 0);
4015      tcg_gen_movi_tl(cpu_ov, 0);
4016      tcg_gen_movi_tl(cpu_ca, 0);
4017  }
4018  
4019  #ifdef TARGET_PPC64
4020  /* mcrxrx */
4021  static void gen_mcrxrx(DisasContext *ctx)
4022  {
4023      TCGv t0 = tcg_temp_new();
4024      TCGv t1 = tcg_temp_new();
4025      TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)];
4026  
4027      /* copy OV and OV32 */
4028      tcg_gen_shli_tl(t0, cpu_ov, 1);
4029      tcg_gen_or_tl(t0, t0, cpu_ov32);
4030      tcg_gen_shli_tl(t0, t0, 2);
4031      /* copy CA and CA32 */
4032      tcg_gen_shli_tl(t1, cpu_ca, 1);
4033      tcg_gen_or_tl(t1, t1, cpu_ca32);
4034      tcg_gen_or_tl(t0, t0, t1);
4035      tcg_gen_trunc_tl_i32(dst, t0);
4036  }
4037  #endif
4038  
4039  /* mfcr mfocrf */
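/*
 * With bit 0x00100000 set this is mfocrf: only the CR field selected by a
 * single-bit CRM is copied into RT (the ISA leaves the remaining bits
 * undefined). Otherwise the full CR is assembled from the eight crf fields.
 */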
4040  static void gen_mfcr(DisasContext *ctx)
4041  {
4042      uint32_t crm, crn;
4043  
4044      if (likely(ctx->opcode & 0x00100000)) {
4045          crm = CRM(ctx->opcode);
4046          if (likely(crm && ((crm & (crm - 1)) == 0))) {
4047              crn = ctz32(crm);
4048              tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]);
4049              tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)],
4050                              cpu_gpr[rD(ctx->opcode)], crn * 4);
4051          }
4052      } else {
4053          TCGv_i32 t0 = tcg_temp_new_i32();
4054          tcg_gen_mov_i32(t0, cpu_crf[0]);
4055          tcg_gen_shli_i32(t0, t0, 4);
4056          tcg_gen_or_i32(t0, t0, cpu_crf[1]);
4057          tcg_gen_shli_i32(t0, t0, 4);
4058          tcg_gen_or_i32(t0, t0, cpu_crf[2]);
4059          tcg_gen_shli_i32(t0, t0, 4);
4060          tcg_gen_or_i32(t0, t0, cpu_crf[3]);
4061          tcg_gen_shli_i32(t0, t0, 4);
4062          tcg_gen_or_i32(t0, t0, cpu_crf[4]);
4063          tcg_gen_shli_i32(t0, t0, 4);
4064          tcg_gen_or_i32(t0, t0, cpu_crf[5]);
4065          tcg_gen_shli_i32(t0, t0, 4);
4066          tcg_gen_or_i32(t0, t0, cpu_crf[6]);
4067          tcg_gen_shli_i32(t0, t0, 4);
4068          tcg_gen_or_i32(t0, t0, cpu_crf[7]);
4069          tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4070      }
4071  }
4072  
4073  /* mfmsr */
4074  static void gen_mfmsr(DisasContext *ctx)
4075  {
4076      CHK_SV(ctx);
4077      tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr);
4078  }
4079  
4080  /* mfspr */
4081  static inline void gen_op_mfspr(DisasContext *ctx)
4082  {
4083      void (*read_cb)(DisasContext *ctx, int gprn, int sprn);
4084      uint32_t sprn = SPR(ctx->opcode);
4085  
4086  #if defined(CONFIG_USER_ONLY)
4087      read_cb = ctx->spr_cb[sprn].uea_read;
4088  #else
4089      if (ctx->pr) {
4090          read_cb = ctx->spr_cb[sprn].uea_read;
4091      } else if (ctx->hv) {
4092          read_cb = ctx->spr_cb[sprn].hea_read;
4093      } else {
4094          read_cb = ctx->spr_cb[sprn].oea_read;
4095      }
4096  #endif
4097      if (likely(read_cb != NULL)) {
4098          if (likely(read_cb != SPR_NOACCESS)) {
4099              (*read_cb)(ctx, rD(ctx->opcode), sprn);
4100          } else {
4101              /* Privilege exception */
4102              /*
4103               * This is a hack to avoid warnings when running Linux:
4104               * this OS breaks the PowerPC virtualisation model,
4105               * allowing userland application to read the PVR
4106               */
4107              if (sprn != SPR_PVR) {
4108                  qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr "
4109                                "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4110                                ctx->cia);
4111              }
4112              gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4113          }
4114      } else {
4115          /* ISA 2.07 defines these as no-ops */
4116          if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4117              (sprn >= 808 && sprn <= 811)) {
4118              /* This is a nop */
4119              return;
4120          }
4121          /* Not defined */
4122          qemu_log_mask(LOG_GUEST_ERROR,
4123                        "Trying to read invalid spr %d (0x%03x) at "
4124                        TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4125  
4126          /*
4127           * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4128           * generate a privilege exception, an HV emulation assist or a no-op.
4129           */
4130          if (sprn & 0x10) {
4131              if (ctx->pr) {
4132                  gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4133              }
4134          } else {
4135              if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) {
4136                  gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4137              }
4138          }
4139      }
4140  }
4141  
4142  static void gen_mfspr(DisasContext *ctx)
4143  {
4144      gen_op_mfspr(ctx);
4145  }
4146  
4147  /* mftb */
4148  static void gen_mftb(DisasContext *ctx)
4149  {
4150      gen_op_mfspr(ctx);
4151  }
4152  
4153  /* mtcrf mtocrf */
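/*
 * mtocrf (bit 0x00100000 set) updates only the CR field selected by a
 * single-bit CRM; plain mtcrf updates every field whose CRM bit is set.
 */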
4154  static void gen_mtcrf(DisasContext *ctx)
4155  {
4156      uint32_t crm, crn;
4157  
4158      crm = CRM(ctx->opcode);
4159      if (likely((ctx->opcode & 0x00100000))) {
4160          if (crm && ((crm & (crm - 1)) == 0)) {
4161              TCGv_i32 temp = tcg_temp_new_i32();
4162              crn = ctz32(crm);
4163              tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4164              tcg_gen_shri_i32(temp, temp, crn * 4);
4165              tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf);
4166          }
4167      } else {
4168          TCGv_i32 temp = tcg_temp_new_i32();
4169          tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]);
4170          for (crn = 0 ; crn < 8 ; crn++) {
4171              if (crm & (1 << crn)) {
4172                      tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4);
4173                      tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf);
4174              }
4175          }
4176      }
4177  }
4178  
4179  /* mtmsr */
4180  #if defined(TARGET_PPC64)
4181  static void gen_mtmsrd(DisasContext *ctx)
4182  {
4183      if (unlikely(!is_book3s_arch2x(ctx))) {
4184          gen_invalid(ctx);
4185          return;
4186      }
4187  
4188      CHK_SV(ctx);
4189  
4190  #if !defined(CONFIG_USER_ONLY)
4191      TCGv t0, t1;
4192      target_ulong mask;
4193  
4194      t0 = tcg_temp_new();
4195      t1 = tcg_temp_new();
4196  
4197      translator_io_start(&ctx->base);
4198  
4199      if (ctx->opcode & 0x00010000) {
4200          /* L=1 form only updates EE and RI */
4201          mask = (1ULL << MSR_RI) | (1ULL << MSR_EE);
4202      } else {
4203          /* mtmsrd does not alter HV, S, ME, or LE */
4204          mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) |
4205                   (1ULL << MSR_HV));
4206          /*
4207           * XXX: we need to update nip before the store: if we enter
4208           *      power-saving mode, we will exit the loop directly from
4209           *      ppc_store_msr.
4210           */
4211          gen_update_nip(ctx, ctx->base.pc_next);
4212      }
4213  
4214      tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4215      tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4216      tcg_gen_or_tl(t0, t0, t1);
4217  
4218      gen_helper_store_msr(tcg_env, t0);
4219  
4220      /* Must stop the translation as machine state (may have) changed */
4221      ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4222  #endif /* !defined(CONFIG_USER_ONLY) */
4223  }
4224  #endif /* defined(TARGET_PPC64) */
4225  
4226  static void gen_mtmsr(DisasContext *ctx)
4227  {
4228      CHK_SV(ctx);
4229  
4230  #if !defined(CONFIG_USER_ONLY)
4231      TCGv t0, t1;
4232      target_ulong mask = 0xFFFFFFFF;
4233  
4234      t0 = tcg_temp_new();
4235      t1 = tcg_temp_new();
4236  
4237      translator_io_start(&ctx->base);
4238      if (ctx->opcode & 0x00010000) {
4239          /* L=1 form only updates EE and RI */
4240          mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE);
4241      } else {
4242          /* mtmsr does not alter S, ME, or LE */
4243          mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S));
4244  
4245          /*
4246           * XXX: we need to update nip before the store: if we enter
4247           *      power-saving mode, we will exit the loop directly from
4248           *      ppc_store_msr.
4249           */
4250          gen_update_nip(ctx, ctx->base.pc_next);
4251      }
4252  
4253      tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask);
4254      tcg_gen_andi_tl(t1, cpu_msr, ~mask);
4255      tcg_gen_or_tl(t0, t0, t1);
4256  
4257      gen_helper_store_msr(tcg_env, t0);
4258  
4259      /* Must stop the translation as machine state (may have) changed */
4260      ctx->base.is_jmp = DISAS_EXIT_UPDATE;
4261  #endif
4262  }
4263  
4264  /* mtspr */
4265  static void gen_mtspr(DisasContext *ctx)
4266  {
4267      void (*write_cb)(DisasContext *ctx, int sprn, int gprn);
4268      uint32_t sprn = SPR(ctx->opcode);
4269  
4270  #if defined(CONFIG_USER_ONLY)
4271      write_cb = ctx->spr_cb[sprn].uea_write;
4272  #else
4273      if (ctx->pr) {
4274          write_cb = ctx->spr_cb[sprn].uea_write;
4275      } else if (ctx->hv) {
4276          write_cb = ctx->spr_cb[sprn].hea_write;
4277      } else {
4278          write_cb = ctx->spr_cb[sprn].oea_write;
4279      }
4280  #endif
4281      if (likely(write_cb != NULL)) {
4282          if (likely(write_cb != SPR_NOACCESS)) {
4283              (*write_cb)(ctx, sprn, rS(ctx->opcode));
4284          } else {
4285              /* Privilege exception */
4286              qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr "
4287                            "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn,
4288                            ctx->cia);
4289              gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4290          }
4291      } else {
4292          /* ISA 2.07 defines these as no-ops */
4293          if ((ctx->insns_flags2 & PPC2_ISA207S) &&
4294              (sprn >= 808 && sprn <= 811)) {
4295              /* This is a nop */
4296              return;
4297          }
4298  
4299          /* Not defined */
4300          qemu_log_mask(LOG_GUEST_ERROR,
4301                        "Trying to write invalid spr %d (0x%03x) at "
4302                        TARGET_FMT_lx "\n", sprn, sprn, ctx->cia);
4303  
4304  
4305          /*
4306           * The behaviour depends on MSR:PR and SPR# bit 0x10: it can
4307           * generate a privilege exception, an HV emulation assist or a no-op.
4308           */
4309          if (sprn & 0x10) {
4310              if (ctx->pr) {
4311                  gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4312              }
4313          } else {
4314              if (ctx->pr || sprn == 0) {
4315                  gen_hvpriv_exception(ctx, POWERPC_EXCP_PRIV_REG);
4316              }
4317          }
4318      }
4319  }
4320  
4321  #if defined(TARGET_PPC64)
4322  /* setb */
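/*
 * setb: RT = -1 if CR field crfS has LT set, 1 if it has GT set, else 0.
 * The two unsigned compares below rely on the field value being >= 8
 * exactly when LT is set and >= 4 when LT or GT is set.
 */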
4323  static void gen_setb(DisasContext *ctx)
4324  {
4325      TCGv_i32 t0 = tcg_temp_new_i32();
4326      TCGv_i32 t8 = tcg_constant_i32(8);
4327      TCGv_i32 tm1 = tcg_constant_i32(-1);
4328      int crf = crfS(ctx->opcode);
4329  
4330      tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4);
4331      tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0);
4332      tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0);
4333  }
4334  #endif
4335  
4336  /***                         Cache management                              ***/
4337  
4338  /* dcbf */
4339  static void gen_dcbf(DisasContext *ctx)
4340  {
4341      /* XXX: specification says this is treated as a load by the MMU */
4342      TCGv t0;
4343      gen_set_access_type(ctx, ACCESS_CACHE);
4344      t0 = tcg_temp_new();
4345      gen_addr_reg_index(ctx, t0);
4346      gen_qemu_ld8u(ctx, t0, t0);
4347  }
4348  
4349  /* dcbfep (external PID dcbf) */
4350  static void gen_dcbfep(DisasContext *ctx)
4351  {
4352      /* XXX: specification says this is treated as a load by the MMU */
4353      TCGv t0;
4354      CHK_SV(ctx);
4355      gen_set_access_type(ctx, ACCESS_CACHE);
4356      t0 = tcg_temp_new();
4357      gen_addr_reg_index(ctx, t0);
4358      tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4359  }
4360  
4361  /* dcbi (Supervisor only) */
4362  static void gen_dcbi(DisasContext *ctx)
4363  {
4364  #if defined(CONFIG_USER_ONLY)
4365      GEN_PRIV(ctx);
4366  #else
4367      TCGv EA, val;
4368  
4369      CHK_SV(ctx);
4370      EA = tcg_temp_new();
4371      gen_set_access_type(ctx, ACCESS_CACHE);
4372      gen_addr_reg_index(ctx, EA);
4373      val = tcg_temp_new();
4374      /* XXX: specification says this should be treated as a store by the MMU */
4375      gen_qemu_ld8u(ctx, val, EA);
4376      gen_qemu_st8(ctx, val, EA);
4377  #endif /* defined(CONFIG_USER_ONLY) */
4378  }
4379  
4380  /* dcbst */
4381  static void gen_dcbst(DisasContext *ctx)
4382  {
4383      /* XXX: specification says this is treated as a load by the MMU */
4384      TCGv t0;
4385      gen_set_access_type(ctx, ACCESS_CACHE);
4386      t0 = tcg_temp_new();
4387      gen_addr_reg_index(ctx, t0);
4388      gen_qemu_ld8u(ctx, t0, t0);
4389  }
4390  
4391  /* dcbstep (external PID dcbst) */
4392  static void gen_dcbstep(DisasContext *ctx)
4393  {
4394      /* XXX: specification says this is treated as a load by the MMU */
4395      TCGv t0;
4396      gen_set_access_type(ctx, ACCESS_CACHE);
4397      t0 = tcg_temp_new();
4398      gen_addr_reg_index(ctx, t0);
4399      tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB));
4400  }
4401  
4402  /* dcbt */
gen_dcbt(DisasContext * ctx)4403  static void gen_dcbt(DisasContext *ctx)
4404  {
4405      /*
4406       * interpreted as no-op
4407       * XXX: specification says this is treated as a load by the MMU but
4408       *      does not generate any exception
4409       */
4410  }
4411  
4412  /* dcbtep */
gen_dcbtep(DisasContext * ctx)4413  static void gen_dcbtep(DisasContext *ctx)
4414  {
4415      /*
4416       * interpreted as no-op
4417       * XXX: specification says this is treated as a load by the MMU but
4418       *      does not generate any exception
4419       */
4420  }
4421  
4422  /* dcbtst */
gen_dcbtst(DisasContext * ctx)4423  static void gen_dcbtst(DisasContext *ctx)
4424  {
4425      /*
4426       * interpreted as no-op
4427       * XXX: specification says this is treated as a load by the MMU but
4428       *      does not generate any exception
4429       */
4430  }
4431  
4432  /* dcbtstep */
gen_dcbtstep(DisasContext * ctx)4433  static void gen_dcbtstep(DisasContext *ctx)
4434  {
4435      /*
4436       * interpreted as no-op
4437       * XXX: specification says this is treated as a load by the MMU but
4438       *      does not generate any exception
4439       */
4440  }
4441  
4442  /* dcbtls */
gen_dcbtls(DisasContext * ctx)4443  static void gen_dcbtls(DisasContext *ctx)
4444  {
4445      /* Always fails locking the cache */
4446      TCGv t0 = tcg_temp_new();
4447      gen_load_spr(t0, SPR_Exxx_L1CSR0);
4448      tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
4449      gen_store_spr(SPR_Exxx_L1CSR0, t0);
4450  }
4451  
4452  /* dcblc */
gen_dcblc(DisasContext * ctx)4453  static void gen_dcblc(DisasContext *ctx)
4454  {
4455      /*
4456       * interpreted as no-op
4457       */
4458  }
4459  
4460  /* dcbz */
gen_dcbz(DisasContext * ctx)4461  static void gen_dcbz(DisasContext *ctx)
4462  {
4463      TCGv tcgv_addr = tcg_temp_new();
4464  
4465      gen_set_access_type(ctx, ACCESS_CACHE);
4466      gen_addr_reg_index(ctx, tcgv_addr);
4467  
4468  #ifdef TARGET_PPC64
4469      if (ctx->excp_model == POWERPC_EXCP_970 && !(ctx->opcode & 0x00200000)) {
4470          gen_helper_dcbzl(tcg_env, tcgv_addr);
4471          return;
4472      }
4473  #endif
4474  
4475      gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(ctx->mem_idx));
4476  }
4477  
4478  /* dcbzep */
gen_dcbzep(DisasContext * ctx)4479  static void gen_dcbzep(DisasContext *ctx)
4480  {
4481      TCGv tcgv_addr = tcg_temp_new();
4482  
4483      gen_set_access_type(ctx, ACCESS_CACHE);
4484      gen_addr_reg_index(ctx, tcgv_addr);
4485      gen_helper_dcbz(tcg_env, tcgv_addr, tcg_constant_i32(PPC_TLB_EPID_STORE));
4486  }
4487  
4488  /* dst / dstt */
gen_dst(DisasContext * ctx)4489  static void gen_dst(DisasContext *ctx)
4490  {
4491      if (rA(ctx->opcode) == 0) {
4492          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4493      } else {
4494          /* interpreted as no-op */
4495      }
4496  }
4497  
4498  /* dstst / dststt */
gen_dstst(DisasContext * ctx)4499  static void gen_dstst(DisasContext *ctx)
4500  {
4501      if (rA(ctx->opcode) == 0) {
4502          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4503      } else {
4504          /* interpreted as no-op */
4505      }
4507  }
4508  
4509  /* dss / dssall */
gen_dss(DisasContext * ctx)4510  static void gen_dss(DisasContext *ctx)
4511  {
4512      /* interpreted as no-op */
4513  }
4514  
4515  /* icbi */
gen_icbi(DisasContext * ctx)4516  static void gen_icbi(DisasContext *ctx)
4517  {
4518      TCGv t0;
4519      gen_set_access_type(ctx, ACCESS_CACHE);
4520      t0 = tcg_temp_new();
4521      gen_addr_reg_index(ctx, t0);
4522      gen_helper_icbi(tcg_env, t0);
4523  }
4524  
4525  /* icbiep */
gen_icbiep(DisasContext * ctx)4526  static void gen_icbiep(DisasContext *ctx)
4527  {
4528      TCGv t0;
4529      gen_set_access_type(ctx, ACCESS_CACHE);
4530      t0 = tcg_temp_new();
4531      gen_addr_reg_index(ctx, t0);
4532      gen_helper_icbiep(tcg_env, t0);
4533  }
4534  
4535  /* Optional: */
4536  /* dcba */
gen_dcba(DisasContext * ctx)4537  static void gen_dcba(DisasContext *ctx)
4538  {
4539      /*
4540       * interpreted as no-op
4541       * XXX: specification says this is treated as a store by the MMU
4542       *      but does not generate any exception
4543       */
4544  }
4545  
4546  /***                    Segment register manipulation                      ***/
4547  /* Supervisor only: */
4548  
4549  /* mfsr */
gen_mfsr(DisasContext * ctx)4550  static void gen_mfsr(DisasContext *ctx)
4551  {
4552  #if defined(CONFIG_USER_ONLY)
4553      GEN_PRIV(ctx);
4554  #else
4555      TCGv t0;
4556  
4557      CHK_SV(ctx);
4558      t0 = tcg_constant_tl(SR(ctx->opcode));
4559      gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4560  #endif /* defined(CONFIG_USER_ONLY) */
4561  }
4562  
4563  /* mfsrin */
gen_mfsrin(DisasContext * ctx)4564  static void gen_mfsrin(DisasContext *ctx)
4565  {
4566  #if defined(CONFIG_USER_ONLY)
4567      GEN_PRIV(ctx);
4568  #else
4569      TCGv t0;
4570  
4571      CHK_SV(ctx);
4572      t0 = tcg_temp_new();
4573      tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4574      gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4575  #endif /* defined(CONFIG_USER_ONLY) */
4576  }
4577  
4578  /* mtsr */
gen_mtsr(DisasContext * ctx)4579  static void gen_mtsr(DisasContext *ctx)
4580  {
4581  #if defined(CONFIG_USER_ONLY)
4582      GEN_PRIV(ctx);
4583  #else
4584      TCGv t0;
4585  
4586      CHK_SV(ctx);
4587      t0 = tcg_constant_tl(SR(ctx->opcode));
4588      gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4589  #endif /* defined(CONFIG_USER_ONLY) */
4590  }
4591  
4592  /* mtsrin */
gen_mtsrin(DisasContext * ctx)4593  static void gen_mtsrin(DisasContext *ctx)
4594  {
4595  #if defined(CONFIG_USER_ONLY)
4596      GEN_PRIV(ctx);
4597  #else
4598      TCGv t0;
4599      CHK_SV(ctx);
4600  
4601      t0 = tcg_temp_new();
4602      tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4603      gen_helper_store_sr(tcg_env, t0, cpu_gpr[rD(ctx->opcode)]);
4604  #endif /* defined(CONFIG_USER_ONLY) */
4605  }
4606  
4607  #if defined(TARGET_PPC64)
4608  /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */
4609  
4610  /* mfsr */
gen_mfsr_64b(DisasContext * ctx)4611  static void gen_mfsr_64b(DisasContext *ctx)
4612  {
4613  #if defined(CONFIG_USER_ONLY)
4614      GEN_PRIV(ctx);
4615  #else
4616      TCGv t0;
4617  
4618      CHK_SV(ctx);
4619      t0 = tcg_constant_tl(SR(ctx->opcode));
4620      gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4621  #endif /* defined(CONFIG_USER_ONLY) */
4622  }
4623  
4624  /* mfsrin */
gen_mfsrin_64b(DisasContext * ctx)4625  static void gen_mfsrin_64b(DisasContext *ctx)
4626  {
4627  #if defined(CONFIG_USER_ONLY)
4628      GEN_PRIV(ctx);
4629  #else
4630      TCGv t0;
4631  
4632      CHK_SV(ctx);
4633      t0 = tcg_temp_new();
4634      tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4635      gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
4636  #endif /* defined(CONFIG_USER_ONLY) */
4637  }
4638  
4639  /* mtsr */
gen_mtsr_64b(DisasContext * ctx)4640  static void gen_mtsr_64b(DisasContext *ctx)
4641  {
4642  #if defined(CONFIG_USER_ONLY)
4643      GEN_PRIV(ctx);
4644  #else
4645      TCGv t0;
4646  
4647      CHK_SV(ctx);
4648      t0 = tcg_constant_tl(SR(ctx->opcode));
4649      gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4650  #endif /* defined(CONFIG_USER_ONLY) */
4651  }
4652  
4653  /* mtsrin */
gen_mtsrin_64b(DisasContext * ctx)4654  static void gen_mtsrin_64b(DisasContext *ctx)
4655  {
4656  #if defined(CONFIG_USER_ONLY)
4657      GEN_PRIV(ctx);
4658  #else
4659      TCGv t0;
4660  
4661      CHK_SV(ctx);
4662      t0 = tcg_temp_new();
4663      tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4);
4664      gen_helper_store_sr(tcg_env, t0, cpu_gpr[rS(ctx->opcode)]);
4665  #endif /* defined(CONFIG_USER_ONLY) */
4666  }
4667  
4668  #endif /* defined(TARGET_PPC64) */
4669  
4670  /***                      Lookaside buffer management                      ***/
4671  /* Optional & supervisor only: */
4672  
4673  /* tlbia */
gen_tlbia(DisasContext * ctx)4674  static void gen_tlbia(DisasContext *ctx)
4675  {
4676  #if defined(CONFIG_USER_ONLY)
4677      GEN_PRIV(ctx);
4678  #else
4679      CHK_HV(ctx);
4680  
4681      gen_helper_tlbia(tcg_env);
4682  #endif  /* defined(CONFIG_USER_ONLY) */
4683  }
4684  
4685  /* tlbsync */
gen_tlbsync(DisasContext * ctx)4686  static void gen_tlbsync(DisasContext *ctx)
4687  {
4688  #if defined(CONFIG_USER_ONLY)
4689      GEN_PRIV(ctx);
4690  #else
4691  
4692      if (ctx->gtse) {
4693          CHK_SV(ctx); /* If gtse is set then tlbsync is supervisor privileged */
4694      } else {
4695          CHK_HV(ctx); /* Else hypervisor privileged */
4696      }
4697  
4698      /* BookS does both ptesync and tlbsync, so make tlbsync a nop for server */
4699      if (ctx->insns_flags & PPC_BOOKE) {
4700          gen_check_tlb_flush(ctx, true);
4701      }
4702  #endif /* defined(CONFIG_USER_ONLY) */
4703  }
4704  
4705  /***                              External control                         ***/
4706  /* Optional: */
4707  
4708  /* eciwx */
gen_eciwx(DisasContext * ctx)4709  static void gen_eciwx(DisasContext *ctx)
4710  {
4711      TCGv t0;
4712      /* Should check EAR[E] ! */
4713      gen_set_access_type(ctx, ACCESS_EXT);
4714      t0 = tcg_temp_new();
4715      gen_addr_reg_index(ctx, t0);
4716      tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4717                         DEF_MEMOP(MO_UL | MO_ALIGN));
4718  }
4719  
4720  /* ecowx */
gen_ecowx(DisasContext * ctx)4721  static void gen_ecowx(DisasContext *ctx)
4722  {
4723      TCGv t0;
4724      /* Should check EAR[E] ! */
4725      gen_set_access_type(ctx, ACCESS_EXT);
4726      t0 = tcg_temp_new();
4727      gen_addr_reg_index(ctx, t0);
4728      tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx,
4729                         DEF_MEMOP(MO_UL | MO_ALIGN));
4730  }
4731  
4732  /* 602 - 603 - G2 TLB management */
4733  
4734  /* tlbld */
gen_tlbld_6xx(DisasContext * ctx)4735  static void gen_tlbld_6xx(DisasContext *ctx)
4736  {
4737  #if defined(CONFIG_USER_ONLY)
4738      GEN_PRIV(ctx);
4739  #else
4740      CHK_SV(ctx);
4741      gen_helper_6xx_tlbd(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4742  #endif /* defined(CONFIG_USER_ONLY) */
4743  }
4744  
4745  /* tlbli */
gen_tlbli_6xx(DisasContext * ctx)4746  static void gen_tlbli_6xx(DisasContext *ctx)
4747  {
4748  #if defined(CONFIG_USER_ONLY)
4749      GEN_PRIV(ctx);
4750  #else
4751      CHK_SV(ctx);
4752      gen_helper_6xx_tlbi(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4753  #endif /* defined(CONFIG_USER_ONLY) */
4754  }
4755  
4756  /* BookE specific instructions */
4757  
4758  /* XXX: not implemented on 440 ? */
gen_mfapidi(DisasContext * ctx)4759  static void gen_mfapidi(DisasContext *ctx)
4760  {
4761      /* XXX: TODO */
4762      gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
4763  }
4764  
4765  /* XXX: not implemented on 440 ? */
gen_tlbiva(DisasContext * ctx)4766  static void gen_tlbiva(DisasContext *ctx)
4767  {
4768  #if defined(CONFIG_USER_ONLY)
4769      GEN_PRIV(ctx);
4770  #else
4771      TCGv t0;
4772  
4773      CHK_SV(ctx);
4774      t0 = tcg_temp_new();
4775      gen_addr_reg_index(ctx, t0);
4776      gen_helper_tlbiva(tcg_env, cpu_gpr[rB(ctx->opcode)]);
4777  #endif /* defined(CONFIG_USER_ONLY) */
4778  }
4779  
4780  /* All 405 MAC instructions are translated here */
gen_405_mulladd_insn(DisasContext * ctx,int opc2,int opc3,int ra,int rb,int rt,int Rc)4781  static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
4782                                          int ra, int rb, int rt, int Rc)
4783  {
4784      TCGv t0, t1;
4785  
4786      t0 = tcg_temp_new();
4787      t1 = tcg_temp_new();
4788  
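    /*
     * opc3's low bits select which halfwords of rA/rB are used and how they
     * are extended (see the cases below); opc3 & 0x01 selects signed
     * arithmetic, & 0x02 saturation and & 0x10 an XER[OV] update.  opc2 &
     * 0x04 accumulates into rT and opc2 & 0x02 subtracts the product
     * instead of adding it (the "nmac" forms).
     */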
4789      switch (opc3 & 0x0D) {
4790      case 0x05:
4791          /* macchw    - macchw.    - macchwo   - macchwo.   */
4792          /* macchws   - macchws.   - macchwso  - macchwso.  */
4793          /* nmacchw   - nmacchw.   - nmacchwo  - nmacchwo.  */
4794          /* nmacchws  - nmacchws.  - nmacchwso - nmacchwso. */
4795          /* mulchw - mulchw. */
4796          tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4797          tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4798          tcg_gen_ext16s_tl(t1, t1);
4799          break;
4800      case 0x04:
4801          /* macchwu   - macchwu.   - macchwuo  - macchwuo.  */
4802          /* macchwsu  - macchwsu.  - macchwsuo - macchwsuo. */
4803          /* mulchwu - mulchwu. */
4804          tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4805          tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4806          tcg_gen_ext16u_tl(t1, t1);
4807          break;
4808      case 0x01:
4809          /* machhw    - machhw.    - machhwo   - machhwo.   */
4810          /* machhws   - machhws.   - machhwso  - machhwso.  */
4811          /* nmachhw   - nmachhw.   - nmachhwo  - nmachhwo.  */
4812          /* nmachhws  - nmachhws.  - nmachhwso - nmachhwso. */
4813          /* mulhhw - mulhhw. */
4814          tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
4815          tcg_gen_ext16s_tl(t0, t0);
4816          tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
4817          tcg_gen_ext16s_tl(t1, t1);
4818          break;
4819      case 0x00:
4820          /* machhwu   - machhwu.   - machhwuo  - machhwuo.  */
4821          /* machhwsu  - machhwsu.  - machhwsuo - machhwsuo. */
4822          /* mulhhwu - mulhhwu. */
4823          tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
4824          tcg_gen_ext16u_tl(t0, t0);
4825          tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
4826          tcg_gen_ext16u_tl(t1, t1);
4827          break;
4828      case 0x0D:
4829          /* maclhw    - maclhw.    - maclhwo   - maclhwo.   */
4830          /* maclhws   - maclhws.   - maclhwso  - maclhwso.  */
4831          /* nmaclhw   - nmaclhw.   - nmaclhwo  - nmaclhwo.  */
4832          /* nmaclhws  - nmaclhws.  - nmaclhwso - nmaclhwso. */
4833          /* mullhw - mullhw. */
4834          tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
4835          tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
4836          break;
4837      case 0x0C:
4838          /* maclhwu   - maclhwu.   - maclhwuo  - maclhwuo.  */
4839          /* maclhwsu  - maclhwsu.  - maclhwsuo - maclhwsuo. */
4840          /* mullhwu - mullhwu. */
4841          tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
4842          tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
4843          break;
4844      }
4845      if (opc2 & 0x04) {
4846          /* (n)multiply-and-accumulate (0x0C / 0x0E) */
4847          tcg_gen_mul_tl(t1, t0, t1);
4848          if (opc2 & 0x02) {
4849              /* nmultiply-and-accumulate (0x0E) */
4850              tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
4851          } else {
4852              /* multiply-and-accumulate (0x0C) */
4853              tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
4854          }
4855  
4856          if (opc3 & 0x12) {
4857              /* Check overflow and/or saturate */
4858              TCGLabel *l1 = gen_new_label();
4859  
4860              if (opc3 & 0x10) {
4861                  /* Start with XER OV disabled, the most likely case */
4862                  tcg_gen_movi_tl(cpu_ov, 0);
4863              }
4864              if (opc3 & 0x01) {
4865                  /* Signed */
4866                  tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
4867                  tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
4868                  tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
4869                  tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
4870                  if (opc3 & 0x02) {
4871                      /* Saturate */
4872                      tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
4873                      tcg_gen_xori_tl(t0, t0, 0x7fffffff);
4874                  }
4875              } else {
4876                  /* Unsigned */
4877                  tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
4878                  if (opc3 & 0x02) {
4879                      /* Saturate */
4880                      tcg_gen_movi_tl(t0, UINT32_MAX);
4881                  }
4882              }
4883              if (opc3 & 0x10) {
4884                  /* Check overflow */
4885                  tcg_gen_movi_tl(cpu_ov, 1);
4886                  tcg_gen_movi_tl(cpu_so, 1);
4887              }
4888              gen_set_label(l1);
4889              tcg_gen_mov_tl(cpu_gpr[rt], t0);
4890          }
4891      } else {
4892          tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
4893      }
4894      if (unlikely(Rc != 0)) {
4895          /* Update Rc0 */
4896          gen_set_Rc0(ctx, cpu_gpr[rt]);
4897      }
4898  }
4899  
4900  #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
4901  static void glue(gen_, name)(DisasContext *ctx)                               \
4902  {                                                                             \
4903      gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
4904                           rD(ctx->opcode), Rc(ctx->opcode));                   \
4905  }
4906  
4907  /* macchw    - macchw.    */
4908  GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
4909  /* macchwo   - macchwo.   */
4910  GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
4911  /* macchws   - macchws.   */
4912  GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
4913  /* macchwso  - macchwso.  */
4914  GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
4915  /* macchwsu  - macchwsu.  */
4916  GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
4917  /* macchwsuo - macchwsuo. */
4918  GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
4919  /* macchwu   - macchwu.   */
4920  GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
4921  /* macchwuo  - macchwuo.  */
4922  GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
4923  /* machhw    - machhw.    */
4924  GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
4925  /* machhwo   - machhwo.   */
4926  GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
4927  /* machhws   - machhws.   */
4928  GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
4929  /* machhwso  - machhwso.  */
4930  GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
4931  /* machhwsu  - machhwsu.  */
4932  GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
4933  /* machhwsuo - machhwsuo. */
4934  GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
4935  /* machhwu   - machhwu.   */
4936  GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
4937  /* machhwuo  - machhwuo.  */
4938  GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
4939  /* maclhw    - maclhw.    */
4940  GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
4941  /* maclhwo   - maclhwo.   */
4942  GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
4943  /* maclhws   - maclhws.   */
4944  GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
4945  /* maclhwso  - maclhwso.  */
4946  GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
4947  /* maclhwu   - maclhwu.   */
4948  GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
4949  /* maclhwuo  - maclhwuo.  */
4950  GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
4951  /* maclhwsu  - maclhwsu.  */
4952  GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
4953  /* maclhwsuo - maclhwsuo. */
4954  GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
4955  /* nmacchw   - nmacchw.   */
4956  GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
4957  /* nmacchwo  - nmacchwo.  */
4958  GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
4959  /* nmacchws  - nmacchws.  */
4960  GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
4961  /* nmacchwso - nmacchwso. */
4962  GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
4963  /* nmachhw   - nmachhw.   */
4964  GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
4965  /* nmachhwo  - nmachhwo.  */
4966  GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
4967  /* nmachhws  - nmachhws.  */
4968  GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
4969  /* nmachhwso - nmachhwso. */
4970  GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
4971  /* nmaclhw   - nmaclhw.   */
4972  GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
4973  /* nmaclhwo  - nmaclhwo.  */
4974  GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
4975  /* nmaclhws  - nmaclhws.  */
4976  GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
4977  /* nmaclhwso - nmaclhwso. */
4978  GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);
4979  
4980  /* mulchw  - mulchw.  */
4981  GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
4982  /* mulchwu - mulchwu. */
4983  GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
4984  /* mulhhw  - mulhhw.  */
4985  GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
4986  /* mulhhwu - mulhhwu. */
4987  GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
4988  /* mullhw  - mullhw.  */
4989  GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
4990  /* mullhwu - mullhwu. */
4991  GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);
4992  
4993  /* mfdcr */
gen_mfdcr(DisasContext * ctx)4994  static void gen_mfdcr(DisasContext *ctx)
4995  {
4996  #if defined(CONFIG_USER_ONLY)
4997      GEN_PRIV(ctx);
4998  #else
4999      TCGv dcrn;
5000  
5001      CHK_SV(ctx);
5002      dcrn = tcg_constant_tl(SPR(ctx->opcode));
5003      gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env, dcrn);
5004  #endif /* defined(CONFIG_USER_ONLY) */
5005  }
5006  
5007  /* mtdcr */
gen_mtdcr(DisasContext * ctx)5008  static void gen_mtdcr(DisasContext *ctx)
5009  {
5010  #if defined(CONFIG_USER_ONLY)
5011      GEN_PRIV(ctx);
5012  #else
5013      TCGv dcrn;
5014  
5015      CHK_SV(ctx);
5016      dcrn = tcg_constant_tl(SPR(ctx->opcode));
5017      gen_helper_store_dcr(tcg_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
5018  #endif /* defined(CONFIG_USER_ONLY) */
5019  }
5020  
5021  /* mfdcrx */
5022  /* XXX: not implemented on 440 ? */
gen_mfdcrx(DisasContext * ctx)5023  static void gen_mfdcrx(DisasContext *ctx)
5024  {
5025  #if defined(CONFIG_USER_ONLY)
5026      GEN_PRIV(ctx);
5027  #else
5028      CHK_SV(ctx);
5029      gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], tcg_env,
5030                          cpu_gpr[rA(ctx->opcode)]);
5031      /* Note: Rc update flag set leads to undefined state of Rc0 */
5032  #endif /* defined(CONFIG_USER_ONLY) */
5033  }
5034  
5035  /* mtdcrx */
5036  /* XXX: not implemented on 440 ? */
gen_mtdcrx(DisasContext * ctx)5037  static void gen_mtdcrx(DisasContext *ctx)
5038  {
5039  #if defined(CONFIG_USER_ONLY)
5040      GEN_PRIV(ctx);
5041  #else
5042      CHK_SV(ctx);
5043      gen_helper_store_dcr(tcg_env, cpu_gpr[rA(ctx->opcode)],
5044                           cpu_gpr[rS(ctx->opcode)]);
5045      /* Note: Rc update flag set leads to undefined state of Rc0 */
5046  #endif /* defined(CONFIG_USER_ONLY) */
5047  }
5048  
5049  /* dccci */
gen_dccci(DisasContext * ctx)5050  static void gen_dccci(DisasContext *ctx)
5051  {
5052      CHK_SV(ctx);
5053      /* interpreted as no-op */
5054  }
5055  
5056  /* dcread */
gen_dcread(DisasContext * ctx)5057  static void gen_dcread(DisasContext *ctx)
5058  {
5059  #if defined(CONFIG_USER_ONLY)
5060      GEN_PRIV(ctx);
5061  #else
5062      TCGv EA, val;
5063  
5064      CHK_SV(ctx);
5065      gen_set_access_type(ctx, ACCESS_CACHE);
5066      EA = tcg_temp_new();
5067      gen_addr_reg_index(ctx, EA);
5068      val = tcg_temp_new();
5069      gen_qemu_ld32u(ctx, val, EA);
5070      tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
5071  #endif /* defined(CONFIG_USER_ONLY) */
5072  }
5073  
5074  /* icbt */
gen_icbt_40x(DisasContext * ctx)5075  static void gen_icbt_40x(DisasContext *ctx)
5076  {
5077      /*
5078       * interpreted as no-op
5079       * XXX: specification says this is treated as a load by the MMU but
5080       *      does not generate any exception
5081       */
5082  }
5083  
5084  /* iccci */
gen_iccci(DisasContext * ctx)5085  static void gen_iccci(DisasContext *ctx)
5086  {
5087      CHK_SV(ctx);
5088      /* interpreted as no-op */
5089  }
5090  
5091  /* icread */
gen_icread(DisasContext * ctx)5092  static void gen_icread(DisasContext *ctx)
5093  {
5094      CHK_SV(ctx);
5095      /* interpreted as no-op */
5096  }
5097  
5098  /* rfci (supervisor only) */
gen_rfci_40x(DisasContext * ctx)5099  static void gen_rfci_40x(DisasContext *ctx)
5100  {
5101  #if defined(CONFIG_USER_ONLY)
5102      GEN_PRIV(ctx);
5103  #else
5104      CHK_SV(ctx);
5105      /* Restore CPU state */
5106      gen_helper_40x_rfci(tcg_env);
5107      ctx->base.is_jmp = DISAS_EXIT;
5108  #endif /* defined(CONFIG_USER_ONLY) */
5109  }
5110  
gen_rfci(DisasContext * ctx)5111  static void gen_rfci(DisasContext *ctx)
5112  {
5113  #if defined(CONFIG_USER_ONLY)
5114      GEN_PRIV(ctx);
5115  #else
5116      CHK_SV(ctx);
5117      /* Restore CPU state */
5118      gen_helper_rfci(tcg_env);
5119      ctx->base.is_jmp = DISAS_EXIT;
5120  #endif /* defined(CONFIG_USER_ONLY) */
5121  }
5122  
5123  /* BookE specific */
5124  
5125  /* XXX: not implemented on 440 ? */
gen_rfdi(DisasContext * ctx)5126  static void gen_rfdi(DisasContext *ctx)
5127  {
5128  #if defined(CONFIG_USER_ONLY)
5129      GEN_PRIV(ctx);
5130  #else
5131      CHK_SV(ctx);
5132      /* Restore CPU state */
5133      gen_helper_rfdi(tcg_env);
5134      ctx->base.is_jmp = DISAS_EXIT;
5135  #endif /* defined(CONFIG_USER_ONLY) */
5136  }
5137  
5138  /* XXX: not implemented on 440 ? */
gen_rfmci(DisasContext * ctx)5139  static void gen_rfmci(DisasContext *ctx)
5140  {
5141  #if defined(CONFIG_USER_ONLY)
5142      GEN_PRIV(ctx);
5143  #else
5144      CHK_SV(ctx);
5145      /* Restore CPU state */
5146      gen_helper_rfmci(tcg_env);
5147      ctx->base.is_jmp = DISAS_EXIT;
5148  #endif /* defined(CONFIG_USER_ONLY) */
5149  }
5150  
5151  /* TLB management - PowerPC 405 implementation */
5152  
5153  /* tlbre */
gen_tlbre_40x(DisasContext * ctx)5154  static void gen_tlbre_40x(DisasContext *ctx)
5155  {
5156  #if defined(CONFIG_USER_ONLY)
5157      GEN_PRIV(ctx);
5158  #else
5159      CHK_SV(ctx);
5160      switch (rB(ctx->opcode)) {
5161      case 0:
5162          gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], tcg_env,
5163                                  cpu_gpr[rA(ctx->opcode)]);
5164          break;
5165      case 1:
5166          gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], tcg_env,
5167                                  cpu_gpr[rA(ctx->opcode)]);
5168          break;
5169      default:
5170          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5171          break;
5172      }
5173  #endif /* defined(CONFIG_USER_ONLY) */
5174  }
5175  
5176  /* tlbsx - tlbsx. */
gen_tlbsx_40x(DisasContext * ctx)5177  static void gen_tlbsx_40x(DisasContext *ctx)
5178  {
5179  #if defined(CONFIG_USER_ONLY)
5180      GEN_PRIV(ctx);
5181  #else
5182      TCGv t0;
5183  
5184      CHK_SV(ctx);
5185      t0 = tcg_temp_new();
5186      gen_addr_reg_index(ctx, t0);
5187      gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
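    /*
     * With Rc=1, CR0 is set to 0b00 || EQ || XER[SO], where the EQ bit is
     * set when the search found a matching entry (RD != -1).
     */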
5188      if (Rc(ctx->opcode)) {
5189          TCGLabel *l1 = gen_new_label();
5190          tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5191          tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5192          tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5193          gen_set_label(l1);
5194      }
5195  #endif /* defined(CONFIG_USER_ONLY) */
5196  }
5197  
5198  /* tlbwe */
gen_tlbwe_40x(DisasContext * ctx)5199  static void gen_tlbwe_40x(DisasContext *ctx)
5200  {
5201  #if defined(CONFIG_USER_ONLY)
5202      GEN_PRIV(ctx);
5203  #else
5204      CHK_SV(ctx);
5205  
5206      switch (rB(ctx->opcode)) {
5207      case 0:
5208          gen_helper_4xx_tlbwe_hi(tcg_env, cpu_gpr[rA(ctx->opcode)],
5209                                  cpu_gpr[rS(ctx->opcode)]);
5210          break;
5211      case 1:
5212          gen_helper_4xx_tlbwe_lo(tcg_env, cpu_gpr[rA(ctx->opcode)],
5213                                  cpu_gpr[rS(ctx->opcode)]);
5214          break;
5215      default:
5216          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5217          break;
5218      }
5219  #endif /* defined(CONFIG_USER_ONLY) */
5220  }
5221  
5222  /* TLB management - PowerPC 440 implementation */
5223  
5224  /* tlbre */
gen_tlbre_440(DisasContext * ctx)5225  static void gen_tlbre_440(DisasContext *ctx)
5226  {
5227  #if defined(CONFIG_USER_ONLY)
5228      GEN_PRIV(ctx);
5229  #else
5230      CHK_SV(ctx);
5231  
5232      switch (rB(ctx->opcode)) {
5233      case 0:
5234      case 1:
5235      case 2:
5236          {
5237              TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5238              gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], tcg_env,
5239                                   t0, cpu_gpr[rA(ctx->opcode)]);
5240          }
5241          break;
5242      default:
5243          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5244          break;
5245      }
5246  #endif /* defined(CONFIG_USER_ONLY) */
5247  }
5248  
5249  /* tlbsx - tlbsx. */
gen_tlbsx_440(DisasContext * ctx)5250  static void gen_tlbsx_440(DisasContext *ctx)
5251  {
5252  #if defined(CONFIG_USER_ONLY)
5253      GEN_PRIV(ctx);
5254  #else
5255      TCGv t0;
5256  
5257      CHK_SV(ctx);
5258      t0 = tcg_temp_new();
5259      gen_addr_reg_index(ctx, t0);
5260      gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], tcg_env, t0);
5261      if (Rc(ctx->opcode)) {
5262          TCGLabel *l1 = gen_new_label();
5263          tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
5264          tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
5265          tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
5266          gen_set_label(l1);
5267      }
5268  #endif /* defined(CONFIG_USER_ONLY) */
5269  }
5270  
5271  /* tlbwe */
gen_tlbwe_440(DisasContext * ctx)5272  static void gen_tlbwe_440(DisasContext *ctx)
5273  {
5274  #if defined(CONFIG_USER_ONLY)
5275      GEN_PRIV(ctx);
5276  #else
5277      CHK_SV(ctx);
5278      switch (rB(ctx->opcode)) {
5279      case 0:
5280      case 1:
5281      case 2:
5282          {
5283              TCGv_i32 t0 = tcg_constant_i32(rB(ctx->opcode));
5284              gen_helper_440_tlbwe(tcg_env, t0, cpu_gpr[rA(ctx->opcode)],
5285                                   cpu_gpr[rS(ctx->opcode)]);
5286          }
5287          break;
5288      default:
5289          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5290          break;
5291      }
5292  #endif /* defined(CONFIG_USER_ONLY) */
5293  }
5294  
5295  /* TLB management - PowerPC BookE 2.06 implementation */
5296  
5297  /* tlbre */
gen_tlbre_booke206(DisasContext * ctx)5298  static void gen_tlbre_booke206(DisasContext *ctx)
5299  {
5300  #if defined(CONFIG_USER_ONLY)
5301      GEN_PRIV(ctx);
5302  #else
5303      CHK_SV(ctx);
5304      gen_helper_booke206_tlbre(tcg_env);
5305  #endif /* defined(CONFIG_USER_ONLY) */
5306  }
5307  
5308  /* tlbsx - tlbsx. */
gen_tlbsx_booke206(DisasContext * ctx)5309  static void gen_tlbsx_booke206(DisasContext *ctx)
5310  {
5311  #if defined(CONFIG_USER_ONLY)
5312      GEN_PRIV(ctx);
5313  #else
5314      TCGv t0;
5315  
5316      CHK_SV(ctx);
5317      if (rA(ctx->opcode)) {
5318          t0 = tcg_temp_new();
5319          tcg_gen_add_tl(t0, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
5320      } else {
5321          t0 = cpu_gpr[rB(ctx->opcode)];
5322      }
5323      gen_helper_booke206_tlbsx(tcg_env, t0);
5324  #endif /* defined(CONFIG_USER_ONLY) */
5325  }
5326  
5327  /* tlbwe */
gen_tlbwe_booke206(DisasContext * ctx)5328  static void gen_tlbwe_booke206(DisasContext *ctx)
5329  {
5330  #if defined(CONFIG_USER_ONLY)
5331      GEN_PRIV(ctx);
5332  #else
5333      CHK_SV(ctx);
5334      gen_helper_booke206_tlbwe(tcg_env);
5335  #endif /* defined(CONFIG_USER_ONLY) */
5336  }
5337  
gen_tlbivax_booke206(DisasContext * ctx)5338  static void gen_tlbivax_booke206(DisasContext *ctx)
5339  {
5340  #if defined(CONFIG_USER_ONLY)
5341      GEN_PRIV(ctx);
5342  #else
5343      TCGv t0;
5344  
5345      CHK_SV(ctx);
5346      t0 = tcg_temp_new();
5347      gen_addr_reg_index(ctx, t0);
5348      gen_helper_booke206_tlbivax(tcg_env, t0);
5349  #endif /* defined(CONFIG_USER_ONLY) */
5350  }
5351  
gen_tlbilx_booke206(DisasContext * ctx)5352  static void gen_tlbilx_booke206(DisasContext *ctx)
5353  {
5354  #if defined(CONFIG_USER_ONLY)
5355      GEN_PRIV(ctx);
5356  #else
5357      TCGv t0;
5358  
5359      CHK_SV(ctx);
5360      t0 = tcg_temp_new();
5361      gen_addr_reg_index(ctx, t0);
5362  
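    /*
     * Bits 21-22 of the opcode hold the tlbilx T field, dispatched to the
     * tlbilx0/tlbilx1/tlbilx3 helpers; the remaining encoding (2) is
     * treated as an invalid instruction.
     */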
5363      switch ((ctx->opcode >> 21) & 0x3) {
5364      case 0:
5365          gen_helper_booke206_tlbilx0(tcg_env, t0);
5366          break;
5367      case 1:
5368          gen_helper_booke206_tlbilx1(tcg_env, t0);
5369          break;
5370      case 3:
5371          gen_helper_booke206_tlbilx3(tcg_env, t0);
5372          break;
5373      default:
5374          gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
5375          break;
5376      }
5377  #endif /* defined(CONFIG_USER_ONLY) */
5378  }
5379  
5380  /* wrtee */
gen_wrtee(DisasContext * ctx)5381  static void gen_wrtee(DisasContext *ctx)
5382  {
5383  #if defined(CONFIG_USER_ONLY)
5384      GEN_PRIV(ctx);
5385  #else
5386      TCGv t0;
5387  
5388      CHK_SV(ctx);
5389      t0 = tcg_temp_new();
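    /* Copy only the EE bit from rD into MSR */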
5390      tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
5391      tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5392      tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
5393      gen_ppc_maybe_interrupt(ctx);
5394      /*
5395       * Stop translation to have a chance to raise an exception if we
5396       * just set msr_ee to 1
5397       */
5398      ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5399  #endif /* defined(CONFIG_USER_ONLY) */
5400  }
5401  
5402  /* wrteei */
gen_wrteei(DisasContext * ctx)5403  static void gen_wrteei(DisasContext *ctx)
5404  {
5405  #if defined(CONFIG_USER_ONLY)
5406      GEN_PRIV(ctx);
5407  #else
5408      CHK_SV(ctx);
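    /* Opcode bit 0x00008000 holds the E field, the new value of MSR[EE] */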
5409      if (ctx->opcode & 0x00008000) {
5410          tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
5411          gen_ppc_maybe_interrupt(ctx);
5412          /* Stop translation to have a chance to raise an exception */
5413          ctx->base.is_jmp = DISAS_EXIT_UPDATE;
5414      } else {
5415          tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
5416      }
5417  #endif /* defined(CONFIG_USER_ONLY) */
5418  }
5419  
5420  /* PowerPC 440 specific instructions */
5421  
5422  /* dlmzb */
gen_dlmzb(DisasContext * ctx)5423  static void gen_dlmzb(DisasContext *ctx)
5424  {
5425      TCGv_i32 t0 = tcg_constant_i32(Rc(ctx->opcode));
5426      gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], tcg_env,
5427                       cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
5428  }
5429  
5430  /* icbt */
gen_icbt_440(DisasContext * ctx)5431  static void gen_icbt_440(DisasContext *ctx)
5432  {
5433      /*
5434       * interpreted as no-op
5435       * XXX: specification says this is treated as a load by the MMU but
5436       *      does not generate any exception
5437       */
5438  }
5439  
gen_tbegin(DisasContext * ctx)5440  static void gen_tbegin(DisasContext *ctx)
5441  {
5442      if (unlikely(!ctx->tm_enabled)) {
5443          gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5444          return;
5445      }
5446      gen_helper_tbegin(tcg_env);
5447  }
5448  
5449  #define GEN_TM_NOOP(name)                                      \
5450  static inline void gen_##name(DisasContext *ctx)               \
5451  {                                                              \
5452      if (unlikely(!ctx->tm_enabled)) {                          \
5453          gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5454          return;                                                \
5455      }                                                          \
5456      /*                                                         \
5457       * Because tbegin always fails in QEMU, these user         \
5458       * space instructions all have a simple implementation:    \
5459       *                                                         \
5460       *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
5461       *           = 0b0 || 0b00    || 0b0                       \
5462       */                                                        \
5463      tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5464  }
5465  
5466  GEN_TM_NOOP(tend);
5467  GEN_TM_NOOP(tabort);
5468  GEN_TM_NOOP(tabortwc);
5469  GEN_TM_NOOP(tabortwci);
5470  GEN_TM_NOOP(tabortdc);
5471  GEN_TM_NOOP(tabortdci);
5472  GEN_TM_NOOP(tsr);
5473  
gen_cp_abort(DisasContext * ctx)5474  static inline void gen_cp_abort(DisasContext *ctx)
5475  {
5476      /* Do Nothing */
5477  }
5478  
5479  #define GEN_CP_PASTE_NOOP(name)                           \
5480  static inline void gen_##name(DisasContext *ctx)          \
5481  {                                                         \
5482      /*                                                    \
5483       * Generate invalid exception until we have an        \
5484       * implementation of the copy paste facility          \
5485       */                                                   \
5486      gen_invalid(ctx);                                     \
5487  }
5488  
5489  GEN_CP_PASTE_NOOP(copy)
GEN_CP_PASTE_NOOP(paste)5490  GEN_CP_PASTE_NOOP(paste)
5491  
5492  static void gen_tcheck(DisasContext *ctx)
5493  {
5494      if (unlikely(!ctx->tm_enabled)) {
5495          gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
5496          return;
5497      }
5498      /*
5499       * Because tbegin always fails, the tcheck implementation is
5500       * simple:
5501       *
5502       * CR[CRF] = TDOOMED || MSR[TS] || 0b0
5503       *         = 0b1 || 0b00 || 0b0
5504       */
5505      tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
5506  }
5507  
5508  #if defined(CONFIG_USER_ONLY)
5509  #define GEN_TM_PRIV_NOOP(name)                                 \
5510  static inline void gen_##name(DisasContext *ctx)               \
5511  {                                                              \
5512      gen_priv_opc(ctx);                                         \
5513  }
5514  
5515  #else
5516  
5517  #define GEN_TM_PRIV_NOOP(name)                                 \
5518  static inline void gen_##name(DisasContext *ctx)               \
5519  {                                                              \
5520      CHK_SV(ctx);                                               \
5521      if (unlikely(!ctx->tm_enabled)) {                          \
5522          gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
5523          return;                                                \
5524      }                                                          \
5525      /*                                                         \
5526       * Because tbegin always fails, the implementation is      \
5527       * simple:                                                 \
5528       *                                                         \
5529       *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
5530       *         = 0b0 || 0b00 || 0b0                            \
5531       */                                                        \
5532      tcg_gen_movi_i32(cpu_crf[0], 0);                           \
5533  }
5534  
5535  #endif
5536  
5537  GEN_TM_PRIV_NOOP(treclaim);
5538  GEN_TM_PRIV_NOOP(trechkpt);
5539  
get_fpr(TCGv_i64 dst,int regno)5540  static inline void get_fpr(TCGv_i64 dst, int regno)
5541  {
5542      tcg_gen_ld_i64(dst, tcg_env, fpr_offset(regno));
5543  }
5544  
set_fpr(int regno,TCGv_i64 src)5545  static inline void set_fpr(int regno, TCGv_i64 src)
5546  {
5547      tcg_gen_st_i64(src, tcg_env, fpr_offset(regno));
5548      /*
5549       * Before PowerISA v3.1 the result of doubleword 1 of the VSR
5550       * corresponding to the target FPR was undefined. However,
5551       * most (if not all) real hardware were setting the result to 0.
5552       * Starting at ISA v3.1, the result for doubleword 1 is now defined
5553       * to be 0.
5554       */
5555      tcg_gen_st_i64(tcg_constant_i64(0), tcg_env, vsr64_offset(regno, false));
5556  }
5557  
5558  /*
5559   * Helpers for decodetree used by !function for decoding arguments.
5560   */
times_2(DisasContext * ctx,int x)5561  static int times_2(DisasContext *ctx, int x)
5562  {
5563      return x * 2;
5564  }
5565  
times_4(DisasContext * ctx,int x)5566  static int times_4(DisasContext *ctx, int x)
5567  {
5568      return x * 4;
5569  }
5570  
times_16(DisasContext * ctx,int x)5571  static int times_16(DisasContext *ctx, int x)
5572  {
5573      return x * 16;
5574  }
5575  
dw_compose_ea(DisasContext * ctx,int x)5576  static int64_t dw_compose_ea(DisasContext *ctx, int x)
5577  {
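    /*
     * Deposit the 6-bit immediate into bits 3..8 of -512, yielding a
     * negative, doubleword-aligned displacement in the range [-512, -8].
     */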
5578      return deposit64(0xfffffffffffffe00, 3, 6, x);
5579  }
5580  
5581  /*
5582   * Helpers for trans_* functions to check for specific insns flags.
5583   * Use token pasting to ensure that we use the proper flag with the
5584   * proper variable.
5585   */
5586  #define REQUIRE_INSNS_FLAGS(CTX, NAME) \
5587      do {                                                \
5588          if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
5589              return false;                               \
5590          }                                               \
5591      } while (0)
5592  
5593  #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
5594      do {                                                \
5595          if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \
5596              return false;                               \
5597          }                                               \
5598      } while (0)
5599  
5600  /* Then special-case the check for 64-bit so that we elide code for ppc32. */
5601  #if TARGET_LONG_BITS == 32
5602  # define REQUIRE_64BIT(CTX)  return false
5603  #else
5604  # define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
5605  #endif
5606  
5607  #define REQUIRE_VECTOR(CTX)                             \
5608      do {                                                \
5609          if (unlikely(!(CTX)->altivec_enabled)) {        \
5610              gen_exception((CTX), POWERPC_EXCP_VPU);     \
5611              return true;                                \
5612          }                                               \
5613      } while (0)
5614  
5615  #define REQUIRE_VSX(CTX)                                \
5616      do {                                                \
5617          if (unlikely(!(CTX)->vsx_enabled)) {            \
5618              gen_exception((CTX), POWERPC_EXCP_VSXU);    \
5619              return true;                                \
5620          }                                               \
5621      } while (0)
5622  
5623  #define REQUIRE_FPU(ctx)                                \
5624      do {                                                \
5625          if (unlikely(!(ctx)->fpu_enabled)) {            \
5626              gen_exception((ctx), POWERPC_EXCP_FPU);     \
5627              return true;                                \
5628          }                                               \
5629      } while (0)
5630  
5631  #if !defined(CONFIG_USER_ONLY)
5632  #define REQUIRE_SV(CTX)             \
5633      do {                            \
5634          if (unlikely((CTX)->pr)) {  \
5635              gen_priv_opc(CTX);      \
5636              return true;            \
5637          }                           \
5638      } while (0)
5639  
5640  #define REQUIRE_HV(CTX)                             \
5641      do {                                            \
5642          if (unlikely((CTX)->pr || !(CTX)->hv)) {    \
5643              gen_priv_opc(CTX);                      \
5644              return true;                            \
5645          }                                           \
5646      } while (0)
5647  #else
5648  #define REQUIRE_SV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5649  #define REQUIRE_HV(CTX) do { gen_priv_opc(CTX); return true; } while (0)
5650  #endif
5651  
5652  /*
5653   * Helpers for implementing sets of trans_* functions.
5654   * Defer the implementation of NAME to FUNC, with optional extra arguments.
5655   */
5656  #define TRANS(NAME, FUNC, ...) \
5657      static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5658      { return FUNC(ctx, a, __VA_ARGS__); }
5659  #define TRANS_FLAGS(FLAGS, NAME, FUNC, ...) \
5660      static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5661      {                                                          \
5662          REQUIRE_INSNS_FLAGS(ctx, FLAGS);                       \
5663          return FUNC(ctx, a, __VA_ARGS__);                      \
5664      }
5665  #define TRANS_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5666      static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5667      {                                                          \
5668          REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5669          return FUNC(ctx, a, __VA_ARGS__);                      \
5670      }
5671  
5672  #define TRANS64(NAME, FUNC, ...) \
5673      static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5674      { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }
5675  #define TRANS64_FLAGS2(FLAGS2, NAME, FUNC, ...) \
5676      static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
5677      {                                                          \
5678          REQUIRE_64BIT(ctx);                                    \
5679          REQUIRE_INSNS_FLAGS2(ctx, FLAGS2);                     \
5680          return FUNC(ctx, a, __VA_ARGS__);                      \
5681      }
5682  
5683  /* TODO: More TRANS* helpers for extra insn_flags checks. */
5684  
5685  
5686  #include "decode-insn32.c.inc"
5687  #include "decode-insn64.c.inc"
5688  #include "power8-pmu-regs.c.inc"
5689  
5690  /*
5691   * Incorporate CIA into the constant when R=1.
5692   * Validate that when R=1, RA=0.
5693   */
resolve_PLS_D(DisasContext * ctx,arg_D * d,arg_PLS_D * a)5694  static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
5695  {
5696      d->rt = a->rt;
5697      d->ra = a->ra;
5698      d->si = a->si;
5699      if (a->r) {
5700          if (unlikely(a->ra != 0)) {
5701              gen_invalid(ctx);
5702              return false;
5703          }
5704          d->si += ctx->cia;
5705      }
5706      return true;
5707  }
5708  
5709  #include "translate/fixedpoint-impl.c.inc"
5710  
5711  #include "translate/fp-impl.c.inc"
5712  
5713  #include "translate/vmx-impl.c.inc"
5714  
5715  #include "translate/vsx-impl.c.inc"
5716  
5717  #include "translate/dfp-impl.c.inc"
5718  
5719  #include "translate/spe-impl.c.inc"
5720  
5721  #include "translate/branch-impl.c.inc"
5722  
5723  #include "translate/processor-ctrl-impl.c.inc"
5724  
5725  #include "translate/storage-ctrl-impl.c.inc"
5726  
5727  #include "translate/misc-impl.c.inc"
5728  
5729  #include "translate/bhrb-impl.c.inc"
5730  
5731  /* Handles lfdp */
gen_dform39(DisasContext * ctx)5732  static void gen_dform39(DisasContext *ctx)
5733  {
5734      if ((ctx->opcode & 0x3) == 0) {
5735          if (ctx->insns_flags2 & PPC2_ISA205) {
5736              return gen_lfdp(ctx);
5737          }
5738      }
5739      return gen_invalid(ctx);
5740  }
5741  
5742  /* Handles stfdp */
gen_dform3D(DisasContext * ctx)5743  static void gen_dform3D(DisasContext *ctx)
5744  {
5745      if ((ctx->opcode & 3) == 0) { /* DS-FORM */
5746          /* stfdp */
5747          if (ctx->insns_flags2 & PPC2_ISA205) {
5748              return gen_stfdp(ctx);
5749          }
5750      }
5751      return gen_invalid(ctx);
5752  }
5753  
5754  #if defined(TARGET_PPC64)
5755  /* brd */
gen_brd(DisasContext * ctx)5756  static void gen_brd(DisasContext *ctx)
5757  {
5758      tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5759  }
5760  
5761  /* brw */
gen_brw(DisasContext * ctx)5762  static void gen_brw(DisasContext *ctx)
5763  {
5764      tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
5765      tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);
5767  }
5768  
5769  /* brh */
gen_brh(DisasContext * ctx)5770  static void gen_brh(DisasContext *ctx)
5771  {
5772      TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
5773      TCGv_i64 t1 = tcg_temp_new_i64();
5774      TCGv_i64 t2 = tcg_temp_new_i64();
5775  
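    /*
     * Byte-swap each halfword: t2 gets the high byte of every halfword
     * moved to the low position, t1 the low byte moved up, and the final
     * OR recombines them into rA.
     */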
5776      tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
5777      tcg_gen_and_i64(t2, t1, mask);
5778      tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
5779      tcg_gen_shli_i64(t1, t1, 8);
5780      tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);
5781  }
5782  #endif
5783  
5784  static opcode_t opcodes[] = {
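/*
 * Each GEN_HANDLER entry gives the mnemonic, the opc1/opc2/opc3 opcode
 * fields, a mask of encoding bits that must be zero, and the PPC_*
 * instruction-type flag; the _E variants add a PPC2_* flag and the
 * GEN_HANDLER2* variants take an explicit opcode name string.
 */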
5785  #if defined(TARGET_PPC64)
5786  GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
5787  GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
5788  GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
5789  #endif
5790  GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
5791  GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300),
5792  GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5793  GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300),
5794  GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5795  GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5796  GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5797  GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER),
5798  GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER),
5799  GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER),
5800  GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER),
5801  #if defined(TARGET_PPC64)
5802  GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B),
5803  GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B),
5804  GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B),
5805  GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B),
5806  GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B),
5807  GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000,
5808                 PPC_NONE, PPC2_ISA300),
5809  GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000,
5810                 PPC_NONE, PPC2_ISA300),
5811  #endif
5812  /* handles lfdp, lxsd, lxssp */
5813  GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5814  /* handles stfdp, stxsd, stxssp */
5815  GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205),
5816  GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5817  GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
5818  GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING),
5819  GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING),
5820  GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING),
5821  GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING),
5822  GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM),
5823  GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5824  GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5825  GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES),
5826  GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300),
5827  GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300),
5828  GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5829  GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206),
5830  GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES),
5831  #if defined(TARGET_PPC64)
5832  GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300),
5833  GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300),
5834  GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B),
5835  GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207),
5836  GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B),
5837  GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207),
5838  #endif
5839  /* ISA v3.0 changed the extended opcode from 62 to 30 */
5840  GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x039FF801, PPC_WAIT),
5841  GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039CF801, PPC_NONE, PPC2_ISA300),
5842  GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5843  GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW),
5844  GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW),
5845  GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW),
5846  GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207),
5847  GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER),
5848  GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW),
5849  #if defined(TARGET_PPC64)
5850  GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B),
5851  #if !defined(CONFIG_USER_ONLY)
5852  /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5853  GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5854  GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300),
5855  GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300),
5856  #endif
5857  GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300),
5858  GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5859  GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5860  GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5861  GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206),
5862  GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H),
5863  #endif
5864  /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */
5865  GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW),
5866  GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW),
5867  GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC),
5868  GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC),
5869  GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC),
5870  GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC),
5871  GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB),
5872  GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC),
5873  #if defined(TARGET_PPC64)
5874  GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B),
5875  GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300),
5876  GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300),
5877  #endif
5878  GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC),
5879  GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC),
5880  GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE),
5881  GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5882  GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE),
5883  GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE),
5884  GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5885  GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE),
5886  GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5887  GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE),
5888  GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206),
5889  GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5890  GEN_HANDLER_E(dcblc, 0x1F, 0x06, 0x0c, 0x02000001, PPC_BOOKE, PPC2_BOOKE206),
5891  GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ),
5892  GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206),
5893  GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC),
5894  GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC),
5895  GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC),
5896  GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI),
5897  GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206),
5898  GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA),
5899  GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT),
5900  GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT),
5901  GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT),
5902  GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT),
5903  #if defined(TARGET_PPC64)
5904  GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B),
5905  GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001,
5906               PPC_SEGMENT_64B),
5907  GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B),
5908  GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001,
5909               PPC_SEGMENT_64B),
5910  #endif
5911  GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA),
5912  /*
5913   * XXX Those instructions will need to be handled differently for
5914   * different ISA versions
5915   */
5916  GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC),
5917  GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN),
5918  GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN),
5919  GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB),
5920  GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB),
5921  GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI),
5922  GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA),
5923  GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR),
5924  GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR),
5925  GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX),
5926  GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX),
5927  GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON),
5928  GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON),
5929  GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT),
5930  GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON),
5931  GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON),
5932  GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP),
5933  GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206),
5934  GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI),
5935  GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI),
5936  GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB),
5937  GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB),
5938  GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB),
5939  GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE),
5940  GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE),
5941  GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE),
5942  GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001,
5943                 PPC_NONE, PPC2_BOOKE206),
5944  GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000,
5945                 PPC_NONE, PPC2_BOOKE206),
5946  GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001,
5947                 PPC_NONE, PPC2_BOOKE206),
5948  GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001,
5949                 PPC_NONE, PPC2_BOOKE206),
5950  GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001,
5951                 PPC_NONE, PPC2_BOOKE206),
5952  GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE),
5953  GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
5954  GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
5955  GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
5956                 PPC_BOOKE, PPC2_BOOKE206),
5957  GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
5958               PPC_440_SPEC),
5959  GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
5960  GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
5961  
5962  #if defined(TARGET_PPC64)
5963  #undef GEN_PPC64_R2
5964  #undef GEN_PPC64_R4
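      /*
       * The 64-bit rotate instructions are registered under several second-
       * level table indices because the index bits overlap their operand
       * fields: GEN_PPC64_R2 emits two aliases (opc2, opc2 | 0x10) and
       * GEN_PPC64_R4 four (opc2, | 0x01, | 0x10, | 0x11) per mnemonic.
       */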
5965  #define GEN_PPC64_R2(name, opc1, opc2)                                        \
5966  GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
5967  GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
5968               PPC_64B)
5969  #define GEN_PPC64_R4(name, opc1, opc2)                                        \
5970  GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
5971  GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
5972               PPC_64B),                                                        \
5973  GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
5974               PPC_64B),                                                        \
5975  GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
5976               PPC_64B)
5977  GEN_PPC64_R4(rldicl, 0x1E, 0x00),
5978  GEN_PPC64_R4(rldicr, 0x1E, 0x02),
5979  GEN_PPC64_R4(rldic, 0x1E, 0x04),
5980  GEN_PPC64_R2(rldcl, 0x1E, 0x08),
5981  GEN_PPC64_R2(rldcr, 0x1E, 0x09),
5982  GEN_PPC64_R4(rldimi, 0x1E, 0x06),
5983  #endif
5984  
5985  #undef GEN_LDX_E
5986  #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
5987  GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),
5988  
5989  #if defined(TARGET_PPC64)
5990  GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)
5991  
5992  /* HV/P7 and later only */
5993  GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
5994  GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
5995  GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
5996  GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
5997  #endif
5998  GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
5999  GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)
6000  
6001  /* External PID based load */
6002  #undef GEN_LDEPX
6003  #define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
6004  GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6005                0x00000001, PPC_NONE, PPC2_BOOKE206),
6006  
6007  GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
6008  GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
6009  GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
6010  #if defined(TARGET_PPC64)
6011  GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
6012  #endif
6013  
6014  #undef GEN_STX_E
6015  #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
6016  GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),
6017  
6018  #if defined(TARGET_PPC64)
6019  GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
6020  GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
6021  GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
6022  GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
6023  GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
6024  #endif
6025  GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
6026  GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)
6027  
6028  #undef GEN_STEPX
6029  #define GEN_STEPX(name, ldop, opc2, opc3)                                     \
6030  GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
6031                0x00000001, PPC_NONE, PPC2_BOOKE206),
6032  
6033  GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
6034  GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
6035  GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
6036  #if defined(TARGET_PPC64)
6037  GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
6038  #endif
6039  
6040  #undef GEN_CRLOGIC
6041  #define GEN_CRLOGIC(name, tcg_op, opc)                                        \
6042  GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
6043  GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
6044  GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
6045  GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
6046  GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
6047  GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
6048  GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
6049  GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
6050  GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),
6051  
6052  #undef GEN_MAC_HANDLER
6053  #define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
6054  GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
6055  GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
6056  GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
6057  GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
6058  GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
6059  GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
6060  GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
6061  GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
6062  GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
6063  GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
6064  GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
6065  GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
6066  GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
6067  GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
6068  GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
6069  GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
6070  GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
6071  GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
6072  GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
6073  GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
6074  GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
6075  GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
6076  GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
6077  GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
6078  GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
6079  GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
6080  GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
6081  GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
6082  GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
6083  GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
6084  GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
6085  GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
6086  GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
6087  GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
6088  GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
6089  GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
6090  GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
6091  GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
6092  GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
6093  GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
6094  GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
6095  GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
6096  GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),
6097  
6098  GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
6099                 PPC_NONE, PPC2_TM),
6100  GEN_HANDLER2_E(tend,   "tend",   0x1F, 0x0E, 0x15, 0x01FFF800, \
6101                 PPC_NONE, PPC2_TM),
6102  GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
6103                 PPC_NONE, PPC2_TM),
6104  GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
6105                 PPC_NONE, PPC2_TM),
6106  GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
6107                 PPC_NONE, PPC2_TM),
6108  GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
6109                 PPC_NONE, PPC2_TM),
6110  GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
6111                 PPC_NONE, PPC2_TM),
6112  GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
6113                 PPC_NONE, PPC2_TM),
6114  GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
6115                 PPC_NONE, PPC2_TM),
6116  GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
6117                 PPC_NONE, PPC2_TM),
6118  GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
6119                 PPC_NONE, PPC2_TM),
6120  
6121  #include "translate/fp-ops.c.inc"
6122  
6123  #include "translate/vmx-ops.c.inc"
6124  
6125  #include "translate/vsx-ops.c.inc"
6126  
6127  #include "translate/spe-ops.c.inc"
6128  };
6129  
6130  /*****************************************************************************/
6131  /* Opcode types */
6132  enum {
6133      PPC_DIRECT   = 0, /* Opcode routine        */
6134      PPC_INDIRECT = 1, /* Indirect opcode table */
6135  };
6136  
6137  #define PPC_OPCODE_MASK 0x3
6138  
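      /*
       * Entries in the opcode tables are tagged pointers: the low bits
       * (PPC_OPCODE_MASK) distinguish a direct handler from a pointer to an
       * indirect sub-table; ind_table() masks the tag off again to recover
       * the sub-table.
       */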
6139  static inline int is_indirect_opcode(void *handler)
6140  {
6141      return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
6142  }
6143  
6144  static inline opc_handler_t **ind_table(void *handler)
6145  {
6146      return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
6147  }
6148  
6149  /* Instruction table creation */
6150  /* Opcode tables creation */
6151  static void fill_new_table(opc_handler_t **table, int len)
6152  {
6153      int i;
6154  
6155      for (i = 0; i < len; i++) {
6156          table[i] = &invalid_handler;
6157      }
6158  }
6159  
6160  static int create_new_table(opc_handler_t **table, unsigned char idx)
6161  {
6162      opc_handler_t **tmp;
6163  
6164      tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
6165      fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
6166      table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);
6167  
6168      return 0;
6169  }
6170  
6171  static int insert_in_table(opc_handler_t **table, unsigned char idx,
6172                              opc_handler_t *handler)
6173  {
6174      if (table[idx] != &invalid_handler) {
6175          return -1;
6176      }
6177      table[idx] = handler;
6178  
6179      return 0;
6180  }
6181  
6182  static int register_direct_insn(opc_handler_t **ppc_opcodes,
6183                                  unsigned char idx, opc_handler_t *handler)
6184  {
6185      if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
6186          printf("*** ERROR: opcode %02x already assigned in main "
6187                 "opcode table\n", idx);
6188          return -1;
6189      }
6190  
6191      return 0;
6192  }
6193  
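      /*
       * Register a handler at [idx1][idx2], allocating the second-level table
       * on demand.  A NULL handler only guarantees that the sub-table exists;
       * the double/triple-indirect helpers below use this to chain deeper
       * tables.
       */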
6194  static int register_ind_in_table(opc_handler_t **table,
6195                                   unsigned char idx1, unsigned char idx2,
6196                                   opc_handler_t *handler)
6197  {
6198      if (table[idx1] == &invalid_handler) {
6199          if (create_new_table(table, idx1) < 0) {
6200              printf("*** ERROR: unable to create indirect table "
6201                     "idx=%02x\n", idx1);
6202              return -1;
6203          }
6204      } else {
6205          if (!is_indirect_opcode(table[idx1])) {
6206              printf("*** ERROR: idx %02x already assigned to a direct "
6207                     "opcode\n", idx1);
6208              return -1;
6209          }
6210      }
6211      if (handler != NULL &&
6212          insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
6213          printf("*** ERROR: opcode %02x already assigned in "
6214                 "opcode table %02x\n", idx2, idx1);
6215          return -1;
6216      }
6217  
6218      return 0;
6219  }
6220  
6221  static int register_ind_insn(opc_handler_t **ppc_opcodes,
6222                               unsigned char idx1, unsigned char idx2,
6223                               opc_handler_t *handler)
6224  {
6225      return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
6226  }
6227  
6228  static int register_dblind_insn(opc_handler_t **ppc_opcodes,
6229                                  unsigned char idx1, unsigned char idx2,
6230                                  unsigned char idx3, opc_handler_t *handler)
6231  {
6232      if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6233          printf("*** ERROR: unable to join indirect table idx "
6234                 "[%02x-%02x]\n", idx1, idx2);
6235          return -1;
6236      }
6237      if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
6238                                handler) < 0) {
6239          printf("*** ERROR: unable to insert opcode "
6240                 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6241          return -1;
6242      }
6243  
6244      return 0;
6245  }
6246  
6247  static int register_trplind_insn(opc_handler_t **ppc_opcodes,
6248                                   unsigned char idx1, unsigned char idx2,
6249                                   unsigned char idx3, unsigned char idx4,
6250                                   opc_handler_t *handler)
6251  {
6252      opc_handler_t **table;
6253  
6254      if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
6255          printf("*** ERROR: unable to join indirect table idx "
6256                 "[%02x-%02x]\n", idx1, idx2);
6257          return -1;
6258      }
6259      table = ind_table(ppc_opcodes[idx1]);
6260      if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
6261          printf("*** ERROR: unable to join 2nd-level indirect table idx "
6262                 "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
6263          return -1;
6264      }
6265      table = ind_table(table[idx2]);
6266      if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
6267          printf("*** ERROR: unable to insert opcode "
6268                 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
6269          return -1;
6270      }
6271      return 0;
6272  }
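      /*
       * Dispatch on how deeply the instruction is encoded: opc2/opc3/opc4
       * equal to 0xFF mean "unused", selecting the direct, single-, double-
       * or triple-indirect registration path.
       */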
6273  static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
6274  {
6275      if (insn->opc2 != 0xFF) {
6276          if (insn->opc3 != 0xFF) {
6277              if (insn->opc4 != 0xFF) {
6278                  if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6279                                            insn->opc3, insn->opc4,
6280                                            &insn->handler) < 0) {
6281                      return -1;
6282                  }
6283              } else {
6284                  if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
6285                                           insn->opc3, &insn->handler) < 0) {
6286                      return -1;
6287                  }
6288              }
6289          } else {
6290              if (register_ind_insn(ppc_opcodes, insn->opc1,
6291                                    insn->opc2, &insn->handler) < 0) {
6292                  return -1;
6293              }
6294          }
6295      } else {
6296          if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
6297              return -1;
6298          }
6299      }
6300  
6301      return 0;
6302  }
6303  
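      /*
       * Count the valid handlers in a table, recursing into indirect
       * sub-tables.  Sub-tables that turn out to be empty are freed and their
       * slot reset to &invalid_handler.
       */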
6304  static int test_opcode_table(opc_handler_t **table, int len)
6305  {
6306      int i, count, tmp;
6307  
6308      for (i = 0, count = 0; i < len; i++) {
6309          /* Consistency fixup */
6310          if (table[i] == NULL) {
6311              table[i] = &invalid_handler;
6312          }
6313          if (table[i] != &invalid_handler) {
6314              if (is_indirect_opcode(table[i])) {
6315                  tmp = test_opcode_table(ind_table(table[i]),
6316                      PPC_CPU_INDIRECT_OPCODES_LEN);
6317                  if (tmp == 0) {
6318                      g_free(table[i]);
6319                      table[i] = &invalid_handler;
6320                  } else {
6321                      count++;
6322                  }
6323              } else {
6324                  count++;
6325              }
6326          }
6327      }
6328  
6329      return count;
6330  }
6331  
6332  static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
6333  {
6334      if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
6335          printf("*** WARNING: no opcode defined !\n");
6336      }
6337  }
6338  
6339  /*****************************************************************************/
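      /*
       * Build this CPU's opcode tables from the static opcodes[] array,
       * registering only handlers whose type/type2 flags match the CPU
       * model's insns_flags/insns_flags2.
       */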
6340  void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
6341  {
6342      PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
6343      opcode_t *opc;
6344  
6345      fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
6346      for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
6347          if (((opc->handler.type & pcc->insns_flags) != 0) ||
6348              ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
6349              if (register_insn(cpu->opcodes, opc) < 0) {
6350                  error_setg(errp, "ERROR initializing PowerPC instruction "
6351                             "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
6352                             opc->opc3);
6353                  return;
6354              }
6355          }
6356      }
6357      fix_opcode_tables(cpu->opcodes);
6358      fflush(stdout);
6359      fflush(stderr);
6360  }
6361  
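      /*
       * Free the dynamically allocated indirect tables (up to three levels)
       * built by create_ppc_opcodes(); slots still holding &invalid_handler
       * were never allocated and are skipped.
       */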
6362  void destroy_ppc_opcodes(PowerPCCPU *cpu)
6363  {
6364      opc_handler_t **table, **table_2;
6365      int i, j, k;
6366  
6367      for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
6368          if (cpu->opcodes[i] == &invalid_handler) {
6369              continue;
6370          }
6371          if (is_indirect_opcode(cpu->opcodes[i])) {
6372              table = ind_table(cpu->opcodes[i]);
6373              for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
6374                  if (table[j] == &invalid_handler) {
6375                      continue;
6376                  }
6377                  if (is_indirect_opcode(table[j])) {
6378                      table_2 = ind_table(table[j]);
6379                      for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
6380                          if (table_2[k] != &invalid_handler &&
6381                              is_indirect_opcode(table_2[k])) {
6382                              g_free((opc_handler_t *)((uintptr_t)table_2[k] &
6383                                                       ~PPC_INDIRECT));
6384                          }
6385                      }
6386                      g_free((opc_handler_t *)((uintptr_t)table[j] &
6387                                               ~PPC_INDIRECT));
6388                  }
6389              }
6390              g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
6391                  ~PPC_INDIRECT));
6392          }
6393      }
6394  }
6395  
6396  int ppc_fixup_cpu(PowerPCCPU *cpu)
6397  {
6398      CPUPPCState *env = &cpu->env;
6399  
6400      /*
6401       * TCG doesn't (yet) emulate some groups of instructions that are
6402       * implemented on some otherwise supported CPUs (e.g. VSX and
6403       * decimal floating point instructions on POWER7).  We remove
6404       * unsupported instruction groups from the cpu state's instruction
6405       * masks and hope the guest can cope.  For at least the pseries
6406       * machine, the unavailability of these instructions can be
6407       * advertised to the guest via the device tree.
6408       */
6409      if ((env->insns_flags & ~PPC_TCG_INSNS)
6410          || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
6411          warn_report("Disabling some instructions which are not "
6412                      "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
6413                      env->insns_flags & ~PPC_TCG_INSNS,
6414                      env->insns_flags2 & ~PPC_TCG_INSNS2);
6415      }
6416      env->insns_flags &= PPC_TCG_INSNS;
6417      env->insns_flags2 &= PPC_TCG_INSNS2;
6418      return 0;
6419  }
6420  
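      /*
       * Decode one instruction with the legacy opcode tables: walk up to
       * three levels of indirect tables keyed by opc1/opc2/opc3/opc4, reject
       * encodings with reserved bits set, then invoke the handler.
       */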
6421  static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
6422  {
6423      opc_handler_t **table, *handler;
6424      uint32_t inval;
6425  
6426      LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
6427                insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6428                ctx->le_mode ? "little" : "big");
6429  
6430      table = cpu->opcodes;
6431      handler = table[opc1(insn)];
6432      if (is_indirect_opcode(handler)) {
6433          table = ind_table(handler);
6434          handler = table[opc2(insn)];
6435          if (is_indirect_opcode(handler)) {
6436              table = ind_table(handler);
6437              handler = table[opc3(insn)];
6438              if (is_indirect_opcode(handler)) {
6439                  table = ind_table(handler);
6440                  handler = table[opc4(insn)];
6441              }
6442          }
6443      }
6444  
6445      /* Is opcode *REALLY* valid? */
6446      if (unlikely(handler->handler == &gen_invalid)) {
6447          qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
6448                        "%02x - %02x - %02x - %02x (%08x) "
6449                        TARGET_FMT_lx "\n",
6450                        opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6451                        insn, ctx->cia);
6452          return false;
6453      }
6454  
6455      if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
6456                   && Rc(insn))) {
6457          inval = handler->inval2;
6458      } else {
6459          inval = handler->inval1;
6460      }
6461  
6462      if (unlikely((insn & inval) != 0)) {
6463          qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
6464                        "%02x - %02x - %02x - %02x (%08x) "
6465                        TARGET_FMT_lx "\n", insn & inval,
6466                        opc1(insn), opc2(insn), opc3(insn), opc4(insn),
6467                        insn, ctx->cia);
6468          return false;
6469      }
6470  
6471      handler->handler(ctx);
6472      return true;
6473  }
6474  
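      /*
       * Unpack the TB's hflags word into the DisasContext fields consulted
       * while translating this block.
       */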
6475  static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
6476  {
6477      DisasContext *ctx = container_of(dcbase, DisasContext, base);
6478      CPUPPCState *env = cpu_env(cs);
6479      uint32_t hflags = ctx->base.tb->flags;
6480  
6481      ctx->spr_cb = env->spr_cb;
6482      ctx->pr = (hflags >> HFLAGS_PR) & 1;
6483      ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
6484      ctx->dr = (hflags >> HFLAGS_DR) & 1;
6485      ctx->hv = (hflags >> HFLAGS_HV) & 1;
6486      ctx->insns_flags = env->insns_flags;
6487      ctx->insns_flags2 = env->insns_flags2;
6488      ctx->access_type = -1;
6489      ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
6490      ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
6491      ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
6492      ctx->flags = env->flags;
6493  #if defined(TARGET_PPC64)
6494      ctx->excp_model = env->excp_model;
6495      ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
6496      ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
6497      ctx->has_bhrb = !!(env->flags & POWERPC_FLAG_BHRB);
6498  #endif
6499      ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
6500          || env->mmu_model & POWERPC_MMU_64;
6501  
6502      ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
6503      ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
6504      ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
6505      ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
6506      ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
6507      ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
6508      ctx->hr = (hflags >> HFLAGS_HR) & 1;
6509      ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
6510      ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
6511      ctx->mmcr0_pmcjce = (hflags >> HFLAGS_PMCJCE) & 1;
6512      ctx->pmc_other = (hflags >> HFLAGS_PMC_OTHER) & 1;
6513      ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;
6514      ctx->bhrb_enable = (hflags >> HFLAGS_BHRB_ENABLE) & 1;
6515  
6516      ctx->singlestep_enabled = 0;
6517      if ((hflags >> HFLAGS_SE) & 1) {
6518          ctx->singlestep_enabled |= CPU_SINGLE_STEP;
6519          ctx->base.max_insns = 1;
6520      }
6521      if ((hflags >> HFLAGS_BE) & 1) {
6522          ctx->singlestep_enabled |= CPU_BRANCH_STEP;
6523      }
6524  }
6525  
6526  static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
6527  {
6528  }
6529  
6530  static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
6531  {
6532      tcg_gen_insn_start(dcbase->pc_next);
6533  }
6534  
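      /*
       * Prefixed instructions use primary opcode 1; the ISA310 check keeps
       * pre-v3.1 CPUs from ever treating a word as a prefix.
       */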
6535  static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
6536  {
6537      REQUIRE_INSNS_FLAGS2(ctx, ISA310);
6538      return opc1(insn) == 1;
6539  }
6540  
6541  static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
6542  {
6543      DisasContext *ctx = container_of(dcbase, DisasContext, base);
6544      PowerPCCPU *cpu = POWERPC_CPU(cs);
6545      CPUPPCState *env = cpu_env(cs);
6546      target_ulong pc;
6547      uint32_t insn;
6548      bool ok;
6549  
6550      LOG_DISAS("----------------\n");
6551      LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
6552                ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);
6553  
6554      ctx->cia = pc = ctx->base.pc_next;
6555      insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
6556      ctx->base.pc_next = pc += 4;
6557  
6558      if (!is_prefix_insn(ctx, insn)) {
6559          ctx->opcode = insn;
6560          ok = (decode_insn32(ctx, insn) ||
6561                decode_legacy(cpu, ctx, insn));
6562      } else if ((pc & 63) == 0) {
6563          /*
6564           * Power v3.1, section 1.9 Exceptions:
6565           * attempt to execute a prefixed instruction that crosses a
6566           * 64-byte address boundary (system alignment error).
6567           */
6568          gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
6569          ok = true;
6570      } else {
6571          uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
6572                                               need_byteswap(ctx));
6573          ctx->base.pc_next = pc += 4;
6574          ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
6575      }
6576      if (!ok) {
6577          gen_invalid(ctx);
6578      }
6579  
6580      /* End the TB when crossing a page boundary. */
6581      if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
6582          ctx->base.is_jmp = DISAS_TOO_MANY;
6583      }
6584  }
6585  
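      /*
       * Finish the TB: raise a debug exception when single-stepping,
       * otherwise chain to the next TB or exit to the main loop according to
       * is_jmp, updating NIP and the PMU instruction count as needed.
       */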
6586  static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
6587  {
6588      DisasContext *ctx = container_of(dcbase, DisasContext, base);
6589      DisasJumpType is_jmp = ctx->base.is_jmp;
6590      target_ulong nip = ctx->base.pc_next;
6591  
6592      if (is_jmp == DISAS_NORETURN) {
6593          /* We have already exited the TB. */
6594          return;
6595      }
6596  
6597      /* Honor single stepping. */
6598      if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)) {
6599          bool rfi_type = false;
6600  
6601          switch (is_jmp) {
6602          case DISAS_TOO_MANY:
6603          case DISAS_EXIT_UPDATE:
6604          case DISAS_CHAIN_UPDATE:
6605              gen_update_nip(ctx, nip);
6606              break;
6607          case DISAS_EXIT:
6608          case DISAS_CHAIN:
6609              /*
6610               * This is a heuristic, to put it kindly. The rfi class of
6611               * instructions is among the few, outside of branches, that
6612               * change NIP without taking an interrupt. Single-step trace
6613               * interrupts do not fire on completion of these instructions.
6614               */
6615              rfi_type = true;
6616              break;
6617          default:
6618              g_assert_not_reached();
6619          }
6620  
6621          gen_debug_exception(ctx, rfi_type);
6622          return;
6623      }
6624  
6625      switch (is_jmp) {
6626      case DISAS_TOO_MANY:
6627          if (use_goto_tb(ctx, nip)) {
6628              pmu_count_insns(ctx);
6629              tcg_gen_goto_tb(0);
6630              gen_update_nip(ctx, nip);
6631              tcg_gen_exit_tb(ctx->base.tb, 0);
6632              break;
6633          }
6634          /* fall through */
6635      case DISAS_CHAIN_UPDATE:
6636          gen_update_nip(ctx, nip);
6637          /* fall through */
6638      case DISAS_CHAIN:
6639          /*
6640           * tcg_gen_lookup_and_goto_ptr will exit the TB if
6641           * CF_NO_GOTO_PTR is set. Count insns now.
6642           */
6643          if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
6644              pmu_count_insns(ctx);
6645          }
6646  
6647          tcg_gen_lookup_and_goto_ptr();
6648          break;
6649  
6650      case DISAS_EXIT_UPDATE:
6651          gen_update_nip(ctx, nip);
6652          /* fall through */
6653      case DISAS_EXIT:
6654          pmu_count_insns(ctx);
6655          tcg_gen_exit_tb(NULL, 0);
6656          break;
6657  
6658      default:
6659          g_assert_not_reached();
6660      }
6661  }
6662  
6663  static const TranslatorOps ppc_tr_ops = {
6664      .init_disas_context = ppc_tr_init_disas_context,
6665      .tb_start           = ppc_tr_tb_start,
6666      .insn_start         = ppc_tr_insn_start,
6667      .translate_insn     = ppc_tr_translate_insn,
6668      .tb_stop            = ppc_tr_tb_stop,
6669  };
6670  
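      /*
       * Main translation entry point: run the common translator loop with
       * the PowerPC hooks defined above.
       */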
6671  void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int *max_insns,
6672                             vaddr pc, void *host_pc)
6673  {
6674      DisasContext ctx;
6675  
6676      translator_loop(cs, tb, max_insns, pc, host_pc, &ppc_tr_ops, &ctx.base);
6677  }
6678