/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "qemu/host-utils.h"
#include "qemu/main-loop.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"
#include "spr_tcg.h"

#include "qemu/qemu-print.h"
#include "qapi/error.h"

#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2

/* Include definitions for instruction classes and implementation flags */
/* #define PPC_DEBUG_DISAS */

#ifdef PPC_DEBUG_DISAS
#  define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#  define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers */

/* global register indexes */
static char cpu_reg_names[10 * 3 + 22 * 4   /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5           /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;  /* current instruction address */
    uint32_t opcode;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    bool hr;
    bool mmcr0_pmcc0;
    bool mmcr0_pmcc1;
    bool pmu_insn_cnt;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

#define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
#define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
#define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
#define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
#ifdef TARGET_PPC64
#  define NARROW_MODE(C)  (!(C)->sf_mode)
#else
#  define NARROW_MODE(C)  0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /*
     * These are all synchronous exceptions; we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_icount_io_start(DisasContext *ctx)
{
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
        /*
         * An I/O instruction must be last in the TB.
         * Chain to the next TB, and let the code from gen_tb_start
         * decide if we need to return to the main loop.
         * Doing this first also allows this value to be overridden.
         */
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}

/*
 * Tells the caller which exception is appropriate to generate and prepares
 * the SPR registers for that exception.
 *
 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
 * POWERPC_EXCP_DEBUG (on BookE).
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx)
{
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
            dbsr = DBCR0_ICMP;
        } else {
            /* Must have been branch */
            dbsr = DBCR0_BRT;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return POWERPC_EXCP_TRACE;
    }
}

static void gen_debug_exception(DisasContext *ctx)
{
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/*****************************************************************************/
/* SPR READ/WRITE CALLBACKS */

void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}

/* #define PPC_DUMP_SPR_ACCESSES */

/*
 * Generic callbacks:
 * do nothing but store/retrieve spr value
 */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_load_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_store_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}

void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
{
    spr_write_generic(ctx, sprn, gprn);

    /*
     * SPR_CTRL writes must force a new translation block,
     * allowing the PMU to calculate the run latch events with
     * more accuracy.
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}

#endif

/* SPR common to all PowerPC */
/* XER */
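/*
 * XER is split across TCG globals: SO, OV and CA (plus OV32/CA32 on ISA
 * v3.00) live in their own fields rather than in the xer word itself.
 * spr_read_xer reassembles the architected layout for mfspr, and
 * spr_write_xer splits a written value back out into the separate fields.
 */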
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* LR */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

/* CFAR */
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}

/* User read access to SPR */
/* USPRx */
/* UMMCRx */
/* UPMCx */
/* USIA */
/* UDECR */
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC */
/* DECR */
#if !defined(CONFIG_USER_ONLY)
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
}

void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC, except 601 */
/* Time base */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}

#endif
#endif

#if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT7U */
/* IBAT0L...IBAT7L */
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* SDR1 */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* 64 bits PowerPC specific SPRs */
/* PIDR */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    tcg_temp_free(t0);
}

void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
}

/* DPDES */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
}

void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
}
#endif
#endif

/* PowerPC 601 specific registers */
/* RTC */
void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
}

void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
    /* Must stop the translation as endianness may have changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
#endif

/* Unified bats */
#if !defined(CONFIG_USER_ONLY)
void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
#endif

/* PowerPC 40x specific registers */
#if !defined(CONFIG_USER_ONLY)
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
}

void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* PowerPC 403 specific registers */
/* PBL1 / PBU1 / PBL2 / PBU2 */
#if !defined(CONFIG_USER_ONLY)
void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1]));
}

void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1);
    gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
    tcg_temp_free(t0);
}
#endif

/* SPE specific registers */
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
    tcg_temp_free_i32(t0);
}

void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_temp_free_i32(t0);
}

#if !defined(CONFIG_USER_ONLY)
/* Callback used to write the exception vector base */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
{
    int sprn_offs;

    if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
        sprn_offs = sprn - SPR_BOOKE_IVOR0;
    } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
        sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
    } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
        sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
    } else {
        printf("Trying to write an unknown exception vector %d %03x\n",
               sprn, sprn);
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }

    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
#endif

#ifdef TARGET_PPC64
#ifndef CONFIG_USER_ONLY
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write the result back */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load UAMOR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write the result back */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load IAMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write the result back */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
#endif
#endif

#ifndef CONFIG_USER_ONLY
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
#endif /* !CONFIG_USER_ONLY */

#if !defined(CONFIG_USER_ONLY)
void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn],
                    ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}

void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}

#endif

#if !defined(CONFIG_USER_ONLY)
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}

void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
{
    TCGv mas7 = tcg_temp_new();
    TCGv mas3 = tcg_temp_new();
    gen_load_spr(mas7, SPR_BOOKE_MAS7);
    tcg_gen_shli_tl(mas7, mas7, 32);
    gen_load_spr(mas3, SPR_BOOKE_MAS3);
    tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
    tcg_temp_free(mas3);
    tcg_temp_free(mas7);
}

#endif

#ifdef TARGET_PPC64
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_msr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
{
    TCGv spr_up = tcg_temp_new();
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_shri_tl(spr_up, spr, 32);
    tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);

    tcg_temp_free(spr);
    tcg_temp_free(spr_up);
}

void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
    gen_store_spr(sprn - 1, spr);

    tcg_temp_free(spr);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
    tcg_temp_free(hmer);
}

void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
#endif /* !defined(CONFIG_USER_ONLY) */

void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
#endif

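/*
 * The GEN_HANDLER* macros below expand to opcode_t table entries: they bind
 * an opc1/opc2/opc3 (and optionally opc4) encoding, an invalid-bits mask and
 * the instruction type flags to the corresponding gen_<name> callback.
 */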
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV                                   \
    do {                                         \
        if (unlikely(ctx->pr || !ctx->hv)) {     \
            GEN_PRIV;                            \
        }                                        \
    } while (0)
#define CHK_SV                   \
    do {                         \
        if (unlikely(ctx->pr)) { \
            GEN_PRIV;            \
        }                        \
    } while (0)
#define CHK_HVRM                                            \
    do {                                                    \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {     \
            GEN_PRIV;                                       \
        }                                                   \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table */

#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl1,                                                      \
        .inval2 = invl2,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1 = invl,                                                       \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};

/*** Integer comparison ***/

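/*
 * Compute a CR field for a signed (s != 0) or unsigned comparison: the LT,
 * GT and EQ bits are selected with two movconds, then XER.SO is OR'ed into
 * the result.
 */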
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/*** Integer arithmetic ***/

static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}

/* Common add function */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);            /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}

/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}

/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze addze. addzeo addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)

/* addic addic. */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

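/*
 * Common 32-bit divide helper: the divide-by-zero and INT_MIN / -1 cases
 * are detected up front and the divisor is forced to a safe value with a
 * movcond so that the host division cannot trap; t2 records whether one of
 * the invalid cases was hit and feeds OV/OV32/SO when compute_ov is set.
 */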
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}

/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);

/* div[wd]eu[o][.] */
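/*
 * The extended-divide variants are implemented with out-of-line helpers;
 * the compute_ov constant passed below tells the helper whether to update
 * the XER overflow bits, and Rc() decides whether CR0 is set from the result.
 */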
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

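/*
 * Modulo helpers. As with the divide helpers, the divisor is forced to a
 * safe value for the boundary cases (division by zero and, for the signed
 * form, INT_MIN modulo -1); the architecture leaves the result undefined
 * there and no overflow is recorded.
 */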
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw. */
*/ 1980 static void gen_mulhw(DisasContext *ctx) 1981 { 1982 TCGv_i32 t0 = tcg_temp_new_i32(); 1983 TCGv_i32 t1 = tcg_temp_new_i32(); 1984 1985 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1986 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1987 tcg_gen_muls2_i32(t0, t1, t0, t1); 1988 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1989 tcg_temp_free_i32(t0); 1990 tcg_temp_free_i32(t1); 1991 if (unlikely(Rc(ctx->opcode) != 0)) { 1992 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1993 } 1994 } 1995 1996 /* mulhwu mulhwu. */ 1997 static void gen_mulhwu(DisasContext *ctx) 1998 { 1999 TCGv_i32 t0 = tcg_temp_new_i32(); 2000 TCGv_i32 t1 = tcg_temp_new_i32(); 2001 2002 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2003 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2004 tcg_gen_mulu2_i32(t0, t1, t0, t1); 2005 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 2006 tcg_temp_free_i32(t0); 2007 tcg_temp_free_i32(t1); 2008 if (unlikely(Rc(ctx->opcode) != 0)) { 2009 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2010 } 2011 } 2012 2013 /* mullw mullw. */ 2014 static void gen_mullw(DisasContext *ctx) 2015 { 2016 #if defined(TARGET_PPC64) 2017 TCGv_i64 t0, t1; 2018 t0 = tcg_temp_new_i64(); 2019 t1 = tcg_temp_new_i64(); 2020 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 2021 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 2022 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2023 tcg_temp_free(t0); 2024 tcg_temp_free(t1); 2025 #else 2026 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2027 cpu_gpr[rB(ctx->opcode)]); 2028 #endif 2029 if (unlikely(Rc(ctx->opcode) != 0)) { 2030 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2031 } 2032 } 2033 2034 /* mullwo mullwo. */ 2035 static void gen_mullwo(DisasContext *ctx) 2036 { 2037 TCGv_i32 t0 = tcg_temp_new_i32(); 2038 TCGv_i32 t1 = tcg_temp_new_i32(); 2039 2040 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2041 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2042 tcg_gen_muls2_i32(t0, t1, t0, t1); 2043 #if defined(TARGET_PPC64) 2044 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2045 #else 2046 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 2047 #endif 2048 2049 tcg_gen_sari_i32(t0, t0, 31); 2050 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 2051 tcg_gen_extu_i32_tl(cpu_ov, t0); 2052 if (is_isa300(ctx)) { 2053 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2054 } 2055 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2056 2057 tcg_temp_free_i32(t0); 2058 tcg_temp_free_i32(t1); 2059 if (unlikely(Rc(ctx->opcode) != 0)) { 2060 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2061 } 2062 } 2063 2064 /* mulli */ 2065 static void gen_mulli(DisasContext *ctx) 2066 { 2067 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2068 SIMM(ctx->opcode)); 2069 } 2070 2071 #if defined(TARGET_PPC64) 2072 /* mulhd mulhd. */ 2073 static void gen_mulhd(DisasContext *ctx) 2074 { 2075 TCGv lo = tcg_temp_new(); 2076 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2077 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2078 tcg_temp_free(lo); 2079 if (unlikely(Rc(ctx->opcode) != 0)) { 2080 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2081 } 2082 } 2083 2084 /* mulhdu mulhdu. */ 2085 static void gen_mulhdu(DisasContext *ctx) 2086 { 2087 TCGv lo = tcg_temp_new(); 2088 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2089 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2090 tcg_temp_free(lo); 2091 if (unlikely(Rc(ctx->opcode) != 0)) { 2092 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2093 } 2094 } 2095 2096 /* mulld mulld. 
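 * (mulld below keeps only the low 64 bits of the product via tcg_gen_mul_tl; mulldo instead uses muls2_i64 and flags overflow when the high half of the product differs from the sign-extension of the low half)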
*/ 2097 static void gen_mulld(DisasContext *ctx) 2098 { 2099 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2100 cpu_gpr[rB(ctx->opcode)]); 2101 if (unlikely(Rc(ctx->opcode) != 0)) { 2102 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2103 } 2104 } 2105 2106 /* mulldo mulldo. */ 2107 static void gen_mulldo(DisasContext *ctx) 2108 { 2109 TCGv_i64 t0 = tcg_temp_new_i64(); 2110 TCGv_i64 t1 = tcg_temp_new_i64(); 2111 2112 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 2113 cpu_gpr[rB(ctx->opcode)]); 2114 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 2115 2116 tcg_gen_sari_i64(t0, t0, 63); 2117 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 2118 if (is_isa300(ctx)) { 2119 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2120 } 2121 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2122 2123 tcg_temp_free_i64(t0); 2124 tcg_temp_free_i64(t1); 2125 2126 if (unlikely(Rc(ctx->opcode) != 0)) { 2127 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2128 } 2129 } 2130 #endif 2131 2132 /* Common subf function */ 2133 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 2134 TCGv arg2, bool add_ca, bool compute_ca, 2135 bool compute_ov, bool compute_rc0) 2136 { 2137 TCGv t0 = ret; 2138 2139 if (compute_ca || compute_ov) { 2140 t0 = tcg_temp_new(); 2141 } 2142 2143 if (compute_ca) { 2144 /* dest = ~arg1 + arg2 [+ ca]. */ 2145 if (NARROW_MODE(ctx)) { 2146 /* 2147 * Caution: a non-obvious corner case of the spec is that 2148 * we must produce the *entire* 64-bit addition, but 2149 * produce the carry into bit 32. 2150 */ 2151 TCGv inv1 = tcg_temp_new(); 2152 TCGv t1 = tcg_temp_new(); 2153 tcg_gen_not_tl(inv1, arg1); 2154 if (add_ca) { 2155 tcg_gen_add_tl(t0, arg2, cpu_ca); 2156 } else { 2157 tcg_gen_addi_tl(t0, arg2, 1); 2158 } 2159 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 2160 tcg_gen_add_tl(t0, t0, inv1); 2161 tcg_temp_free(inv1); 2162 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 2163 tcg_temp_free(t1); 2164 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 2165 if (is_isa300(ctx)) { 2166 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2167 } 2168 } else if (add_ca) { 2169 TCGv zero, inv1 = tcg_temp_new(); 2170 tcg_gen_not_tl(inv1, arg1); 2171 zero = tcg_const_tl(0); 2172 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 2173 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 2174 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 2175 tcg_temp_free(zero); 2176 tcg_temp_free(inv1); 2177 } else { 2178 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 2179 tcg_gen_sub_tl(t0, arg2, arg1); 2180 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 2181 } 2182 } else if (add_ca) { 2183 /* 2184 * Since we're ignoring carry-out, we can simplify the 2185 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
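 * (in two's complement ~arg1 == -arg1 - 1, hence ~arg1 + arg2 + ca == arg2 - arg1 + ca - 1)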
2186 */ 2187 tcg_gen_sub_tl(t0, arg2, arg1); 2188 tcg_gen_add_tl(t0, t0, cpu_ca); 2189 tcg_gen_subi_tl(t0, t0, 1); 2190 } else { 2191 tcg_gen_sub_tl(t0, arg2, arg1); 2192 } 2193 2194 if (compute_ov) { 2195 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 2196 } 2197 if (unlikely(compute_rc0)) { 2198 gen_set_Rc0(ctx, t0); 2199 } 2200 2201 if (t0 != ret) { 2202 tcg_gen_mov_tl(ret, t0); 2203 tcg_temp_free(t0); 2204 } 2205 } 2206 /* Sub functions with two operands */ 2207 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 2208 static void glue(gen_, name)(DisasContext *ctx) \ 2209 { \ 2210 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2211 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2212 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2213 } 2214 /* Sub functions with one operand and one immediate */ 2215 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 2216 add_ca, compute_ca, compute_ov) \ 2217 static void glue(gen_, name)(DisasContext *ctx) \ 2218 { \ 2219 TCGv t0 = tcg_const_tl(const_val); \ 2220 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2221 cpu_gpr[rA(ctx->opcode)], t0, \ 2222 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2223 tcg_temp_free(t0); \ 2224 } 2225 /* subf subf. subfo subfo. */ 2226 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 2227 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 2228 /* subfc subfc. subfco subfco. */ 2229 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 2230 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 2231 /* subfe subfe. subfeo subfeo. */ 2232 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 2233 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 2234 /* subfme subfme. subfmeo subfmeo. */ 2235 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 2236 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 2237 /* subfze subfze. subfzeo subfzeo. */ 2238 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 2239 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 2240 2241 /* subfic */ 2242 static void gen_subfic(DisasContext *ctx) 2243 { 2244 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 2245 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2246 c, 0, 1, 0, 0); 2247 tcg_temp_free(c); 2248 } 2249 2250 /* neg neg. nego nego. */ 2251 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 2252 { 2253 TCGv zero = tcg_const_tl(0); 2254 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2255 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 2256 tcg_temp_free(zero); 2257 } 2258 2259 static void gen_neg(DisasContext *ctx) 2260 { 2261 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2262 if (unlikely(Rc(ctx->opcode))) { 2263 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2264 } 2265 } 2266 2267 static void gen_nego(DisasContext *ctx) 2268 { 2269 gen_op_arith_neg(ctx, 1); 2270 } 2271 2272 /*** Integer logical ***/ 2273 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 2274 static void glue(gen_, name)(DisasContext *ctx) \ 2275 { \ 2276 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 2277 cpu_gpr[rB(ctx->opcode)]); \ 2278 if (unlikely(Rc(ctx->opcode) != 0)) \ 2279 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2280 } 2281 2282 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 2283 static void glue(gen_, name)(DisasContext *ctx) \ 2284 { \ 2285 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 2286 if (unlikely(Rc(ctx->opcode) != 0)) \ 2287 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2288 } 2289 2290 /* and & and.
*/ 2291 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 2292 /* andc & andc. */ 2293 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 2294 2295 /* andi. */ 2296 static void gen_andi_(DisasContext *ctx) 2297 { 2298 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2299 UIMM(ctx->opcode)); 2300 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2301 } 2302 2303 /* andis. */ 2304 static void gen_andis_(DisasContext *ctx) 2305 { 2306 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2307 UIMM(ctx->opcode) << 16); 2308 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2309 } 2310 2311 /* cntlzw */ 2312 static void gen_cntlzw(DisasContext *ctx) 2313 { 2314 TCGv_i32 t = tcg_temp_new_i32(); 2315 2316 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2317 tcg_gen_clzi_i32(t, t, 32); 2318 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2319 tcg_temp_free_i32(t); 2320 2321 if (unlikely(Rc(ctx->opcode) != 0)) { 2322 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2323 } 2324 } 2325 2326 /* cnttzw */ 2327 static void gen_cnttzw(DisasContext *ctx) 2328 { 2329 TCGv_i32 t = tcg_temp_new_i32(); 2330 2331 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2332 tcg_gen_ctzi_i32(t, t, 32); 2333 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2334 tcg_temp_free_i32(t); 2335 2336 if (unlikely(Rc(ctx->opcode) != 0)) { 2337 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2338 } 2339 } 2340 2341 /* eqv & eqv. */ 2342 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 2343 /* extsb & extsb. */ 2344 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 2345 /* extsh & extsh. */ 2346 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 2347 /* nand & nand. */ 2348 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 2349 /* nor & nor. */ 2350 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 2351 2352 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 2353 static void gen_pause(DisasContext *ctx) 2354 { 2355 TCGv_i32 t0 = tcg_const_i32(0); 2356 tcg_gen_st_i32(t0, cpu_env, 2357 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 2358 tcg_temp_free_i32(t0); 2359 2360 /* Stop translation, this gives other CPUs a chance to run */ 2361 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 2362 } 2363 #endif /* defined(TARGET_PPC64) */ 2364 2365 /* or & or. */ 2366 static void gen_or(DisasContext *ctx) 2367 { 2368 int rs, ra, rb; 2369 2370 rs = rS(ctx->opcode); 2371 ra = rA(ctx->opcode); 2372 rb = rB(ctx->opcode); 2373 /* Optimisation for mr. 
ri case */ 2374 if (rs != ra || rs != rb) { 2375 if (rs != rb) { 2376 tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]); 2377 } else { 2378 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]); 2379 } 2380 if (unlikely(Rc(ctx->opcode) != 0)) { 2381 gen_set_Rc0(ctx, cpu_gpr[ra]); 2382 } 2383 } else if (unlikely(Rc(ctx->opcode) != 0)) { 2384 gen_set_Rc0(ctx, cpu_gpr[rs]); 2385 #if defined(TARGET_PPC64) 2386 } else if (rs != 0) { /* 0 is nop */ 2387 int prio = 0; 2388 2389 switch (rs) { 2390 case 1: 2391 /* Set process priority to low */ 2392 prio = 2; 2393 break; 2394 case 6: 2395 /* Set process priority to medium-low */ 2396 prio = 3; 2397 break; 2398 case 2: 2399 /* Set process priority to normal */ 2400 prio = 4; 2401 break; 2402 #if !defined(CONFIG_USER_ONLY) 2403 case 31: 2404 if (!ctx->pr) { 2405 /* Set process priority to very low */ 2406 prio = 1; 2407 } 2408 break; 2409 case 5: 2410 if (!ctx->pr) { 2411 /* Set process priority to medium-high */ 2412 prio = 5; 2413 } 2414 break; 2415 case 3: 2416 if (!ctx->pr) { 2417 /* Set process priority to high */ 2418 prio = 6; 2419 } 2420 break; 2421 case 7: 2422 if (ctx->hv && !ctx->pr) { 2423 /* Set process priority to very high */ 2424 prio = 7; 2425 } 2426 break; 2427 #endif 2428 default: 2429 break; 2430 } 2431 if (prio) { 2432 TCGv t0 = tcg_temp_new(); 2433 gen_load_spr(t0, SPR_PPR); 2434 tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL); 2435 tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50); 2436 gen_store_spr(SPR_PPR, t0); 2437 tcg_temp_free(t0); 2438 } 2439 #if !defined(CONFIG_USER_ONLY) 2440 /* 2441 * Pause out of TCG, otherwise spin loops with smt_low eat too 2442 * much CPU and the kernel hangs. This applies to all 2443 * encodings other than no-op, e.g., miso(rs=26), yield(27), 2444 * mdoio(29), mdoom(30), and all currently undefined. 2445 */ 2446 gen_pause(ctx); 2447 #endif 2448 #endif 2449 } 2450 } 2451 /* orc & orc. */ 2452 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER); 2453 2454 /* xor & xor.
*/ 2455 static void gen_xor(DisasContext *ctx) 2456 { 2457 /* Optimisation for "set to zero" case */ 2458 if (rS(ctx->opcode) != rB(ctx->opcode)) { 2459 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2460 cpu_gpr[rB(ctx->opcode)]); 2461 } else { 2462 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 2463 } 2464 if (unlikely(Rc(ctx->opcode) != 0)) { 2465 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2466 } 2467 } 2468 2469 /* ori */ 2470 static void gen_ori(DisasContext *ctx) 2471 { 2472 target_ulong uimm = UIMM(ctx->opcode); 2473 2474 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2475 return; 2476 } 2477 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2478 } 2479 2480 /* oris */ 2481 static void gen_oris(DisasContext *ctx) 2482 { 2483 target_ulong uimm = UIMM(ctx->opcode); 2484 2485 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2486 /* NOP */ 2487 return; 2488 } 2489 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2490 uimm << 16); 2491 } 2492 2493 /* xori */ 2494 static void gen_xori(DisasContext *ctx) 2495 { 2496 target_ulong uimm = UIMM(ctx->opcode); 2497 2498 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2499 /* NOP */ 2500 return; 2501 } 2502 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2503 } 2504 2505 /* xoris */ 2506 static void gen_xoris(DisasContext *ctx) 2507 { 2508 target_ulong uimm = UIMM(ctx->opcode); 2509 2510 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2511 /* NOP */ 2512 return; 2513 } 2514 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2515 uimm << 16); 2516 } 2517 2518 /* popcntb : PowerPC 2.03 specification */ 2519 static void gen_popcntb(DisasContext *ctx) 2520 { 2521 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2522 } 2523 2524 static void gen_popcntw(DisasContext *ctx) 2525 { 2526 #if defined(TARGET_PPC64) 2527 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2528 #else 2529 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2530 #endif 2531 } 2532 2533 #if defined(TARGET_PPC64) 2534 /* popcntd: PowerPC 2.06 specification */ 2535 static void gen_popcntd(DisasContext *ctx) 2536 { 2537 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2538 } 2539 #endif 2540 2541 /* prtyw: PowerPC 2.05 specification */ 2542 static void gen_prtyw(DisasContext *ctx) 2543 { 2544 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2545 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2546 TCGv t0 = tcg_temp_new(); 2547 tcg_gen_shri_tl(t0, rs, 16); 2548 tcg_gen_xor_tl(ra, rs, t0); 2549 tcg_gen_shri_tl(t0, ra, 8); 2550 tcg_gen_xor_tl(ra, ra, t0); 2551 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 2552 tcg_temp_free(t0); 2553 } 2554 2555 #if defined(TARGET_PPC64) 2556 /* prtyd: PowerPC 2.05 specification */ 2557 static void gen_prtyd(DisasContext *ctx) 2558 { 2559 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2560 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2561 TCGv t0 = tcg_temp_new(); 2562 tcg_gen_shri_tl(t0, rs, 32); 2563 tcg_gen_xor_tl(ra, rs, t0); 2564 tcg_gen_shri_tl(t0, ra, 16); 2565 tcg_gen_xor_tl(ra, ra, t0); 2566 tcg_gen_shri_tl(t0, ra, 8); 2567 tcg_gen_xor_tl(ra, ra, t0); 2568 tcg_gen_andi_tl(ra, ra, 1); 2569 tcg_temp_free(t0); 2570 } 2571 #endif 2572 2573 #if defined(TARGET_PPC64) 2574 /* bpermd */ 2575 static void gen_bpermd(DisasContext *ctx) 2576 { 2577 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 2578 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2579 } 2580 #endif 2581 
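/* prtyw/prtyd above compute the parity of the least-significant bit of each byte by xor-folding shifted copies of rS (per word for prtyw, over the whole doubleword for prtyd); e.g. for prtyd, rs = 0x0102030405060708 has byte lsbs 1,0,1,0,1,0,1,0 and so yields ra = 0. */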
2582 #if defined(TARGET_PPC64) 2583 /* extsw & extsw. */ 2584 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 2585 2586 /* cntlzd */ 2587 static void gen_cntlzd(DisasContext *ctx) 2588 { 2589 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2590 if (unlikely(Rc(ctx->opcode) != 0)) { 2591 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2592 } 2593 } 2594 2595 /* cnttzd */ 2596 static void gen_cnttzd(DisasContext *ctx) 2597 { 2598 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2599 if (unlikely(Rc(ctx->opcode) != 0)) { 2600 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2601 } 2602 } 2603 2604 /* darn */ 2605 static void gen_darn(DisasContext *ctx) 2606 { 2607 int l = L(ctx->opcode); 2608 2609 if (l > 2) { 2610 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 2611 } else { 2612 gen_icount_io_start(ctx); 2613 if (l == 0) { 2614 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 2615 } else { 2616 /* Return 64-bit random for both CRN and RRN */ 2617 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 2618 } 2619 } 2620 } 2621 #endif 2622 2623 /*** Integer rotate ***/ 2624 2625 /* rlwimi & rlwimi. */ 2626 static void gen_rlwimi(DisasContext *ctx) 2627 { 2628 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2629 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2630 uint32_t sh = SH(ctx->opcode); 2631 uint32_t mb = MB(ctx->opcode); 2632 uint32_t me = ME(ctx->opcode); 2633 2634 if (sh == (31 - me) && mb <= me) { 2635 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2636 } else { 2637 target_ulong mask; 2638 bool mask_in_32b = true; 2639 TCGv t1; 2640 2641 #if defined(TARGET_PPC64) 2642 mb += 32; 2643 me += 32; 2644 #endif 2645 mask = MASK(mb, me); 2646 2647 #if defined(TARGET_PPC64) 2648 if (mask > 0xffffffffu) { 2649 mask_in_32b = false; 2650 } 2651 #endif 2652 t1 = tcg_temp_new(); 2653 if (mask_in_32b) { 2654 TCGv_i32 t0 = tcg_temp_new_i32(); 2655 tcg_gen_trunc_tl_i32(t0, t_rs); 2656 tcg_gen_rotli_i32(t0, t0, sh); 2657 tcg_gen_extu_i32_tl(t1, t0); 2658 tcg_temp_free_i32(t0); 2659 } else { 2660 #if defined(TARGET_PPC64) 2661 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 2662 tcg_gen_rotli_i64(t1, t1, sh); 2663 #else 2664 g_assert_not_reached(); 2665 #endif 2666 } 2667 2668 tcg_gen_andi_tl(t1, t1, mask); 2669 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2670 tcg_gen_or_tl(t_ra, t_ra, t1); 2671 tcg_temp_free(t1); 2672 } 2673 if (unlikely(Rc(ctx->opcode) != 0)) { 2674 gen_set_Rc0(ctx, t_ra); 2675 } 2676 } 2677 2678 /* rlwinm & rlwinm. 
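 * rlwinm ra,rs,sh,mb,me computes ROTL32(rs, sh) & MASK(mb, me); the code below maps the shift-into-a-zeroed-field and right-justified extract forms onto deposit_z/extract and otherwise falls back to rotate-then-mask.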
*/ 2679 static void gen_rlwinm(DisasContext *ctx) 2680 { 2681 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2682 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2683 int sh = SH(ctx->opcode); 2684 int mb = MB(ctx->opcode); 2685 int me = ME(ctx->opcode); 2686 int len = me - mb + 1; 2687 int rsh = (32 - sh) & 31; 2688 2689 if (sh != 0 && len > 0 && me == (31 - sh)) { 2690 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2691 } else if (me == 31 && rsh + len <= 32) { 2692 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2693 } else { 2694 target_ulong mask; 2695 bool mask_in_32b = true; 2696 #if defined(TARGET_PPC64) 2697 mb += 32; 2698 me += 32; 2699 #endif 2700 mask = MASK(mb, me); 2701 #if defined(TARGET_PPC64) 2702 if (mask > 0xffffffffu) { 2703 mask_in_32b = false; 2704 } 2705 #endif 2706 if (mask_in_32b) { 2707 if (sh == 0) { 2708 tcg_gen_andi_tl(t_ra, t_rs, mask); 2709 } else { 2710 TCGv_i32 t0 = tcg_temp_new_i32(); 2711 tcg_gen_trunc_tl_i32(t0, t_rs); 2712 tcg_gen_rotli_i32(t0, t0, sh); 2713 tcg_gen_andi_i32(t0, t0, mask); 2714 tcg_gen_extu_i32_tl(t_ra, t0); 2715 tcg_temp_free_i32(t0); 2716 } 2717 } else { 2718 #if defined(TARGET_PPC64) 2719 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2720 tcg_gen_rotli_i64(t_ra, t_ra, sh); 2721 tcg_gen_andi_i64(t_ra, t_ra, mask); 2722 #else 2723 g_assert_not_reached(); 2724 #endif 2725 } 2726 } 2727 if (unlikely(Rc(ctx->opcode) != 0)) { 2728 gen_set_Rc0(ctx, t_ra); 2729 } 2730 } 2731 2732 /* rlwnm & rlwnm. */ 2733 static void gen_rlwnm(DisasContext *ctx) 2734 { 2735 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2736 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2737 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2738 uint32_t mb = MB(ctx->opcode); 2739 uint32_t me = ME(ctx->opcode); 2740 target_ulong mask; 2741 bool mask_in_32b = true; 2742 2743 #if defined(TARGET_PPC64) 2744 mb += 32; 2745 me += 32; 2746 #endif 2747 mask = MASK(mb, me); 2748 2749 #if defined(TARGET_PPC64) 2750 if (mask > 0xffffffffu) { 2751 mask_in_32b = false; 2752 } 2753 #endif 2754 if (mask_in_32b) { 2755 TCGv_i32 t0 = tcg_temp_new_i32(); 2756 TCGv_i32 t1 = tcg_temp_new_i32(); 2757 tcg_gen_trunc_tl_i32(t0, t_rb); 2758 tcg_gen_trunc_tl_i32(t1, t_rs); 2759 tcg_gen_andi_i32(t0, t0, 0x1f); 2760 tcg_gen_rotl_i32(t1, t1, t0); 2761 tcg_gen_extu_i32_tl(t_ra, t1); 2762 tcg_temp_free_i32(t0); 2763 tcg_temp_free_i32(t1); 2764 } else { 2765 #if defined(TARGET_PPC64) 2766 TCGv_i64 t0 = tcg_temp_new_i64(); 2767 tcg_gen_andi_i64(t0, t_rb, 0x1f); 2768 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2769 tcg_gen_rotl_i64(t_ra, t_ra, t0); 2770 tcg_temp_free_i64(t0); 2771 #else 2772 g_assert_not_reached(); 2773 #endif 2774 } 2775 2776 tcg_gen_andi_tl(t_ra, t_ra, mask); 2777 2778 if (unlikely(Rc(ctx->opcode) != 0)) { 2779 gen_set_Rc0(ctx, t_ra); 2780 } 2781 } 2782 2783 #if defined(TARGET_PPC64) 2784 #define GEN_PPC64_R2(name, opc1, opc2) \ 2785 static void glue(gen_, name##0)(DisasContext *ctx) \ 2786 { \ 2787 gen_##name(ctx, 0); \ 2788 } \ 2789 \ 2790 static void glue(gen_, name##1)(DisasContext *ctx) \ 2791 { \ 2792 gen_##name(ctx, 1); \ 2793 } 2794 #define GEN_PPC64_R4(name, opc1, opc2) \ 2795 static void glue(gen_, name##0)(DisasContext *ctx) \ 2796 { \ 2797 gen_##name(ctx, 0, 0); \ 2798 } \ 2799 \ 2800 static void glue(gen_, name##1)(DisasContext *ctx) \ 2801 { \ 2802 gen_##name(ctx, 0, 1); \ 2803 } \ 2804 \ 2805 static void glue(gen_, name##2)(DisasContext *ctx) \ 2806 { \ 2807 gen_##name(ctx, 1, 0); \ 2808 } \ 2809 \ 2810 static void glue(gen_, name##3)(DisasContext *ctx) \ 2811 { \ 2812 gen_##name(ctx, 1, 1); \ 2813 } 2814 2815 static void 
gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2816 { 2817 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2818 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2819 int len = me - mb + 1; 2820 int rsh = (64 - sh) & 63; 2821 2822 if (sh != 0 && len > 0 && me == (63 - sh)) { 2823 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2824 } else if (me == 63 && rsh + len <= 64) { 2825 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2826 } else { 2827 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2828 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2829 } 2830 if (unlikely(Rc(ctx->opcode) != 0)) { 2831 gen_set_Rc0(ctx, t_ra); 2832 } 2833 } 2834 2835 /* rldicl - rldicl. */ 2836 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2837 { 2838 uint32_t sh, mb; 2839 2840 sh = SH(ctx->opcode) | (shn << 5); 2841 mb = MB(ctx->opcode) | (mbn << 5); 2842 gen_rldinm(ctx, mb, 63, sh); 2843 } 2844 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2845 2846 /* rldicr - rldicr. */ 2847 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2848 { 2849 uint32_t sh, me; 2850 2851 sh = SH(ctx->opcode) | (shn << 5); 2852 me = MB(ctx->opcode) | (men << 5); 2853 gen_rldinm(ctx, 0, me, sh); 2854 } 2855 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2856 2857 /* rldic - rldic. */ 2858 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2859 { 2860 uint32_t sh, mb; 2861 2862 sh = SH(ctx->opcode) | (shn << 5); 2863 mb = MB(ctx->opcode) | (mbn << 5); 2864 gen_rldinm(ctx, mb, 63 - sh, sh); 2865 } 2866 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2867 2868 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2869 { 2870 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2871 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2872 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2873 TCGv t0; 2874 2875 t0 = tcg_temp_new(); 2876 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2877 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2878 tcg_temp_free(t0); 2879 2880 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2881 if (unlikely(Rc(ctx->opcode) != 0)) { 2882 gen_set_Rc0(ctx, t_ra); 2883 } 2884 } 2885 2886 /* rldcl - rldcl. */ 2887 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2888 { 2889 uint32_t mb; 2890 2891 mb = MB(ctx->opcode) | (mbn << 5); 2892 gen_rldnm(ctx, mb, 63); 2893 } 2894 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2895 2896 /* rldcr - rldcr. */ 2897 static inline void gen_rldcr(DisasContext *ctx, int men) 2898 { 2899 uint32_t me; 2900 2901 me = MB(ctx->opcode) | (men << 5); 2902 gen_rldnm(ctx, 0, me); 2903 } 2904 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2905 2906 /* rldimi - rldimi. */ 2907 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2908 { 2909 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2910 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2911 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2912 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2913 uint32_t me = 63 - sh; 2914 2915 if (mb <= me) { 2916 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2917 } else { 2918 target_ulong mask = MASK(mb, me); 2919 TCGv t1 = tcg_temp_new(); 2920 2921 tcg_gen_rotli_tl(t1, t_rs, sh); 2922 tcg_gen_andi_tl(t1, t1, mask); 2923 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2924 tcg_gen_or_tl(t_ra, t_ra, t1); 2925 tcg_temp_free(t1); 2926 } 2927 if (unlikely(Rc(ctx->opcode) != 0)) { 2928 gen_set_Rc0(ctx, t_ra); 2929 } 2930 } 2931 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2932 #endif 2933 2934 /*** Integer shift ***/ 2935 2936 /* slw & slw. 
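 * Mask trick below: bit 5 of the shift amount in rB is moved into the sign position and arithmetically shifted back, yielding an all-ones mask exactly when the shift amount is >= 32; andc then clears rS in that case before shifting by rB & 0x1f.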
*/ 2937 static void gen_slw(DisasContext *ctx) 2938 { 2939 TCGv t0, t1; 2940 2941 t0 = tcg_temp_new(); 2942 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2943 #if defined(TARGET_PPC64) 2944 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2945 tcg_gen_sari_tl(t0, t0, 0x3f); 2946 #else 2947 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2948 tcg_gen_sari_tl(t0, t0, 0x1f); 2949 #endif 2950 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2951 t1 = tcg_temp_new(); 2952 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2953 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2954 tcg_temp_free(t1); 2955 tcg_temp_free(t0); 2956 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2957 if (unlikely(Rc(ctx->opcode) != 0)) { 2958 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2959 } 2960 } 2961 2962 /* sraw & sraw. */ 2963 static void gen_sraw(DisasContext *ctx) 2964 { 2965 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2966 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2967 if (unlikely(Rc(ctx->opcode) != 0)) { 2968 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2969 } 2970 } 2971 2972 /* srawi & srawi. */ 2973 static void gen_srawi(DisasContext *ctx) 2974 { 2975 int sh = SH(ctx->opcode); 2976 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2977 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2978 if (sh == 0) { 2979 tcg_gen_ext32s_tl(dst, src); 2980 tcg_gen_movi_tl(cpu_ca, 0); 2981 if (is_isa300(ctx)) { 2982 tcg_gen_movi_tl(cpu_ca32, 0); 2983 } 2984 } else { 2985 TCGv t0; 2986 tcg_gen_ext32s_tl(dst, src); 2987 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2988 t0 = tcg_temp_new(); 2989 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2990 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2991 tcg_temp_free(t0); 2992 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2993 if (is_isa300(ctx)) { 2994 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2995 } 2996 tcg_gen_sari_tl(dst, dst, sh); 2997 } 2998 if (unlikely(Rc(ctx->opcode) != 0)) { 2999 gen_set_Rc0(ctx, dst); 3000 } 3001 } 3002 3003 /* srw & srw. */ 3004 static void gen_srw(DisasContext *ctx) 3005 { 3006 TCGv t0, t1; 3007 3008 t0 = tcg_temp_new(); 3009 /* AND rS with a mask that is 0 when rB >= 0x20 */ 3010 #if defined(TARGET_PPC64) 3011 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 3012 tcg_gen_sari_tl(t0, t0, 0x3f); 3013 #else 3014 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 3015 tcg_gen_sari_tl(t0, t0, 0x1f); 3016 #endif 3017 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3018 tcg_gen_ext32u_tl(t0, t0); 3019 t1 = tcg_temp_new(); 3020 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 3021 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3022 tcg_temp_free(t1); 3023 tcg_temp_free(t0); 3024 if (unlikely(Rc(ctx->opcode) != 0)) { 3025 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3026 } 3027 } 3028 3029 #if defined(TARGET_PPC64) 3030 /* sld & sld. */ 3031 static void gen_sld(DisasContext *ctx) 3032 { 3033 TCGv t0, t1; 3034 3035 t0 = tcg_temp_new(); 3036 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3037 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3038 tcg_gen_sari_tl(t0, t0, 0x3f); 3039 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3040 t1 = tcg_temp_new(); 3041 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3042 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3043 tcg_temp_free(t1); 3044 tcg_temp_free(t0); 3045 if (unlikely(Rc(ctx->opcode) != 0)) { 3046 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3047 } 3048 } 3049 3050 /* srad & srad. 
*/ 3051 static void gen_srad(DisasContext *ctx) 3052 { 3053 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 3054 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3055 if (unlikely(Rc(ctx->opcode) != 0)) { 3056 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3057 } 3058 } 3059 /* sradi & sradi. */ 3060 static inline void gen_sradi(DisasContext *ctx, int n) 3061 { 3062 int sh = SH(ctx->opcode) + (n << 5); 3063 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3064 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3065 if (sh == 0) { 3066 tcg_gen_mov_tl(dst, src); 3067 tcg_gen_movi_tl(cpu_ca, 0); 3068 if (is_isa300(ctx)) { 3069 tcg_gen_movi_tl(cpu_ca32, 0); 3070 } 3071 } else { 3072 TCGv t0; 3073 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 3074 t0 = tcg_temp_new(); 3075 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 3076 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 3077 tcg_temp_free(t0); 3078 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 3079 if (is_isa300(ctx)) { 3080 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3081 } 3082 tcg_gen_sari_tl(dst, src, sh); 3083 } 3084 if (unlikely(Rc(ctx->opcode) != 0)) { 3085 gen_set_Rc0(ctx, dst); 3086 } 3087 } 3088 3089 static void gen_sradi0(DisasContext *ctx) 3090 { 3091 gen_sradi(ctx, 0); 3092 } 3093 3094 static void gen_sradi1(DisasContext *ctx) 3095 { 3096 gen_sradi(ctx, 1); 3097 } 3098 3099 /* extswsli & extswsli. */ 3100 static inline void gen_extswsli(DisasContext *ctx, int n) 3101 { 3102 int sh = SH(ctx->opcode) + (n << 5); 3103 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3104 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3105 3106 tcg_gen_ext32s_tl(dst, src); 3107 tcg_gen_shli_tl(dst, dst, sh); 3108 if (unlikely(Rc(ctx->opcode) != 0)) { 3109 gen_set_Rc0(ctx, dst); 3110 } 3111 } 3112 3113 static void gen_extswsli0(DisasContext *ctx) 3114 { 3115 gen_extswsli(ctx, 0); 3116 } 3117 3118 static void gen_extswsli1(DisasContext *ctx) 3119 { 3120 gen_extswsli(ctx, 1); 3121 } 3122 3123 /* srd & srd. 
*/ 3124 static void gen_srd(DisasContext *ctx) 3125 { 3126 TCGv t0, t1; 3127 3128 t0 = tcg_temp_new(); 3129 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3130 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3131 tcg_gen_sari_tl(t0, t0, 0x3f); 3132 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3133 t1 = tcg_temp_new(); 3134 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3135 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3136 tcg_temp_free(t1); 3137 tcg_temp_free(t0); 3138 if (unlikely(Rc(ctx->opcode) != 0)) { 3139 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3140 } 3141 } 3142 #endif 3143 3144 /*** Addressing modes ***/ 3145 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 3146 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 3147 target_long maskl) 3148 { 3149 target_long simm = SIMM(ctx->opcode); 3150 3151 simm &= ~maskl; 3152 if (rA(ctx->opcode) == 0) { 3153 if (NARROW_MODE(ctx)) { 3154 simm = (uint32_t)simm; 3155 } 3156 tcg_gen_movi_tl(EA, simm); 3157 } else if (likely(simm != 0)) { 3158 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 3159 if (NARROW_MODE(ctx)) { 3160 tcg_gen_ext32u_tl(EA, EA); 3161 } 3162 } else { 3163 if (NARROW_MODE(ctx)) { 3164 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3165 } else { 3166 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3167 } 3168 } 3169 } 3170 3171 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 3172 { 3173 if (rA(ctx->opcode) == 0) { 3174 if (NARROW_MODE(ctx)) { 3175 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3176 } else { 3177 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3178 } 3179 } else { 3180 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3181 if (NARROW_MODE(ctx)) { 3182 tcg_gen_ext32u_tl(EA, EA); 3183 } 3184 } 3185 } 3186 3187 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 3188 { 3189 if (rA(ctx->opcode) == 0) { 3190 tcg_gen_movi_tl(EA, 0); 3191 } else if (NARROW_MODE(ctx)) { 3192 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3193 } else { 3194 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3195 } 3196 } 3197 3198 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 3199 target_long val) 3200 { 3201 tcg_gen_addi_tl(ret, arg1, val); 3202 if (NARROW_MODE(ctx)) { 3203 tcg_gen_ext32u_tl(ret, ret); 3204 } 3205 } 3206 3207 static inline void gen_align_no_le(DisasContext *ctx) 3208 { 3209 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 3210 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 3211 } 3212 3213 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ) 3214 { 3215 TCGv ea = tcg_temp_new(); 3216 if (ra) { 3217 tcg_gen_add_tl(ea, cpu_gpr[ra], displ); 3218 } else { 3219 tcg_gen_mov_tl(ea, displ); 3220 } 3221 if (NARROW_MODE(ctx)) { 3222 tcg_gen_ext32u_tl(ea, ea); 3223 } 3224 return ea; 3225 } 3226 3227 /*** Integer load ***/ 3228 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 3229 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 3230 3231 #define GEN_QEMU_LOAD_TL(ldop, op) \ 3232 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 3233 TCGv val, \ 3234 TCGv addr) \ 3235 { \ 3236 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 3237 } 3238 3239 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 3240 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 3241 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 3242 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 3243 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 3244 3245 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 3246 
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 3247 3248 #define GEN_QEMU_LOAD_64(ldop, op) \ 3249 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 3250 TCGv_i64 val, \ 3251 TCGv addr) \ 3252 { \ 3253 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 3254 } 3255 3256 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 3257 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 3258 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 3259 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 3260 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_Q)) 3261 3262 #if defined(TARGET_PPC64) 3263 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_Q)) 3264 #endif 3265 3266 #define GEN_QEMU_STORE_TL(stop, op) \ 3267 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 3268 TCGv val, \ 3269 TCGv addr) \ 3270 { \ 3271 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 3272 } 3273 3274 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY) 3275 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 3276 #endif 3277 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 3278 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 3279 3280 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 3281 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 3282 3283 #define GEN_QEMU_STORE_64(stop, op) \ 3284 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 3285 TCGv_i64 val, \ 3286 TCGv addr) \ 3287 { \ 3288 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 3289 } 3290 3291 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 3292 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 3293 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 3294 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_Q)) 3295 3296 #if defined(TARGET_PPC64) 3297 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_Q)) 3298 #endif 3299 3300 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 3301 static void glue(gen_, name##x)(DisasContext *ctx) \ 3302 { \ 3303 TCGv EA; \ 3304 chk; \ 3305 gen_set_access_type(ctx, ACCESS_INT); \ 3306 EA = tcg_temp_new(); \ 3307 gen_addr_reg_index(ctx, EA); \ 3308 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3309 tcg_temp_free(EA); \ 3310 } 3311 3312 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 3313 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3314 3315 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 3316 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3317 3318 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 3319 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3320 { \ 3321 TCGv EA; \ 3322 CHK_SV; \ 3323 gen_set_access_type(ctx, ACCESS_INT); \ 3324 EA = tcg_temp_new(); \ 3325 gen_addr_reg_index(ctx, EA); \ 3326 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 3327 tcg_temp_free(EA); \ 3328 } 3329 3330 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 3331 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 3332 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 3333 #if defined(TARGET_PPC64) 3334 GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00) 3335 #endif 3336 3337 #if defined(TARGET_PPC64) 3338 /* CI load/store variants */ 3339 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 3340 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 3341 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 3342 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 3343 #endif 3344 3345 /*** Integer store ***/ 3346 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 3347 static void glue(gen_, name##x)(DisasContext *ctx) \ 3348 { \ 3349 TCGv EA; \ 3350 chk; \ 3351 gen_set_access_type(ctx, ACCESS_INT); \ 3352 EA = tcg_temp_new(); \ 3353 gen_addr_reg_index(ctx, EA); \ 3354 gen_qemu_##stop(ctx, 
cpu_gpr[rS(ctx->opcode)], EA); \ 3355 tcg_temp_free(EA); \ 3356 } 3357 #define GEN_STX(name, stop, opc2, opc3, type) \ 3358 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3359 3360 #define GEN_STX_HVRM(name, stop, opc2, opc3, type) \ 3361 GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3362 3363 #define GEN_STEPX(name, stop, opc2, opc3) \ 3364 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3365 { \ 3366 TCGv EA; \ 3367 CHK_SV; \ 3368 gen_set_access_type(ctx, ACCESS_INT); \ 3369 EA = tcg_temp_new(); \ 3370 gen_addr_reg_index(ctx, EA); \ 3371 tcg_gen_qemu_st_tl( \ 3372 cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \ 3373 tcg_temp_free(EA); \ 3374 } 3375 3376 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 3377 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 3378 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 3379 #if defined(TARGET_PPC64) 3380 GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1d, 0x04) 3381 #endif 3382 3383 #if defined(TARGET_PPC64) 3384 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 3385 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 3386 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 3387 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 3388 #endif 3389 /*** Integer load and store with byte reverse ***/ 3390 3391 /* lhbrx */ 3392 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER); 3393 3394 /* lwbrx */ 3395 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER); 3396 3397 #if defined(TARGET_PPC64) 3398 /* ldbrx */ 3399 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE); 3400 /* stdbrx */ 3401 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE); 3402 #endif /* TARGET_PPC64 */ 3403 3404 /* sthbrx */ 3405 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER); 3406 /* stwbrx */ 3407 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER); 3408 3409 /*** Integer load and store multiple ***/ 3410 3411 /* lmw */ 3412 static void gen_lmw(DisasContext *ctx) 3413 { 3414 TCGv t0; 3415 TCGv_i32 t1; 3416 3417 if (ctx->le_mode) { 3418 gen_align_no_le(ctx); 3419 return; 3420 } 3421 gen_set_access_type(ctx, ACCESS_INT); 3422 t0 = tcg_temp_new(); 3423 t1 = tcg_const_i32(rD(ctx->opcode)); 3424 gen_addr_imm_index(ctx, t0, 0); 3425 gen_helper_lmw(cpu_env, t0, t1); 3426 tcg_temp_free(t0); 3427 tcg_temp_free_i32(t1); 3428 } 3429 3430 /* stmw */ 3431 static void gen_stmw(DisasContext *ctx) 3432 { 3433 TCGv t0; 3434 TCGv_i32 t1; 3435 3436 if (ctx->le_mode) { 3437 gen_align_no_le(ctx); 3438 return; 3439 } 3440 gen_set_access_type(ctx, ACCESS_INT); 3441 t0 = tcg_temp_new(); 3442 t1 = tcg_const_i32(rS(ctx->opcode)); 3443 gen_addr_imm_index(ctx, t0, 0); 3444 gen_helper_stmw(cpu_env, t0, t1); 3445 tcg_temp_free(t0); 3446 tcg_temp_free_i32(t1); 3447 } 3448 3449 /*** Integer load and store strings ***/ 3450 3451 /* lswi */ 3452 /* 3453 * The PowerPC32 specification says we must generate an exception if rA is 3454 * in the range of registers to be loaded. On the other hand, IBM says 3455 * this is valid, but rA won't be loaded. For now, I'll follow the 3456 * spec...
3457 */ 3458 static void gen_lswi(DisasContext *ctx) 3459 { 3460 TCGv t0; 3461 TCGv_i32 t1, t2; 3462 int nb = NB(ctx->opcode); 3463 int start = rD(ctx->opcode); 3464 int ra = rA(ctx->opcode); 3465 int nr; 3466 3467 if (ctx->le_mode) { 3468 gen_align_no_le(ctx); 3469 return; 3470 } 3471 if (nb == 0) { 3472 nb = 32; 3473 } 3474 nr = DIV_ROUND_UP(nb, 4); 3475 if (unlikely(lsw_reg_in_range(start, nr, ra))) { 3476 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX); 3477 return; 3478 } 3479 gen_set_access_type(ctx, ACCESS_INT); 3480 t0 = tcg_temp_new(); 3481 gen_addr_register(ctx, t0); 3482 t1 = tcg_const_i32(nb); 3483 t2 = tcg_const_i32(start); 3484 gen_helper_lsw(cpu_env, t0, t1, t2); 3485 tcg_temp_free(t0); 3486 tcg_temp_free_i32(t1); 3487 tcg_temp_free_i32(t2); 3488 } 3489 3490 /* lswx */ 3491 static void gen_lswx(DisasContext *ctx) 3492 { 3493 TCGv t0; 3494 TCGv_i32 t1, t2, t3; 3495 3496 if (ctx->le_mode) { 3497 gen_align_no_le(ctx); 3498 return; 3499 } 3500 gen_set_access_type(ctx, ACCESS_INT); 3501 t0 = tcg_temp_new(); 3502 gen_addr_reg_index(ctx, t0); 3503 t1 = tcg_const_i32(rD(ctx->opcode)); 3504 t2 = tcg_const_i32(rA(ctx->opcode)); 3505 t3 = tcg_const_i32(rB(ctx->opcode)); 3506 gen_helper_lswx(cpu_env, t0, t1, t2, t3); 3507 tcg_temp_free(t0); 3508 tcg_temp_free_i32(t1); 3509 tcg_temp_free_i32(t2); 3510 tcg_temp_free_i32(t3); 3511 } 3512 3513 /* stswi */ 3514 static void gen_stswi(DisasContext *ctx) 3515 { 3516 TCGv t0; 3517 TCGv_i32 t1, t2; 3518 int nb = NB(ctx->opcode); 3519 3520 if (ctx->le_mode) { 3521 gen_align_no_le(ctx); 3522 return; 3523 } 3524 gen_set_access_type(ctx, ACCESS_INT); 3525 t0 = tcg_temp_new(); 3526 gen_addr_register(ctx, t0); 3527 if (nb == 0) { 3528 nb = 32; 3529 } 3530 t1 = tcg_const_i32(nb); 3531 t2 = tcg_const_i32(rS(ctx->opcode)); 3532 gen_helper_stsw(cpu_env, t0, t1, t2); 3533 tcg_temp_free(t0); 3534 tcg_temp_free_i32(t1); 3535 tcg_temp_free_i32(t2); 3536 } 3537 3538 /* stswx */ 3539 static void gen_stswx(DisasContext *ctx) 3540 { 3541 TCGv t0; 3542 TCGv_i32 t1, t2; 3543 3544 if (ctx->le_mode) { 3545 gen_align_no_le(ctx); 3546 return; 3547 } 3548 gen_set_access_type(ctx, ACCESS_INT); 3549 t0 = tcg_temp_new(); 3550 gen_addr_reg_index(ctx, t0); 3551 t1 = tcg_temp_new_i32(); 3552 tcg_gen_trunc_tl_i32(t1, cpu_xer); 3553 tcg_gen_andi_i32(t1, t1, 0x7F); 3554 t2 = tcg_const_i32(rS(ctx->opcode)); 3555 gen_helper_stsw(cpu_env, t0, t1, t2); 3556 tcg_temp_free(t0); 3557 tcg_temp_free_i32(t1); 3558 tcg_temp_free_i32(t2); 3559 } 3560 3561 /*** Memory synchronisation ***/ 3562 /* eieio */ 3563 static void gen_eieio(DisasContext *ctx) 3564 { 3565 TCGBar bar = TCG_MO_LD_ST; 3566 3567 /* 3568 * POWER9 has a eieio instruction variant using bit 6 as a hint to 3569 * tell the CPU it is a store-forwarding barrier. 3570 */ 3571 if (ctx->opcode & 0x2000000) { 3572 /* 3573 * ISA says that "Reserved fields in instructions are ignored 3574 * by the processor". So ignore the bit 6 on non-POWER9 CPU but 3575 * as this is not an instruction software should be using, 3576 * complain to the user. 
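 * (bit 6 in IBM big-endian bit numbering corresponds to mask 0x2000000 of the 32-bit opcode, which is what the enclosing test checks)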
3577 */ 3578 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3579 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3580 TARGET_FMT_lx "\n", ctx->cia); 3581 } else { 3582 bar = TCG_MO_ST_LD; 3583 } 3584 } 3585 3586 tcg_gen_mb(bar | TCG_BAR_SC); 3587 } 3588 3589 #if !defined(CONFIG_USER_ONLY) 3590 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3591 { 3592 TCGv_i32 t; 3593 TCGLabel *l; 3594 3595 if (!ctx->lazy_tlb_flush) { 3596 return; 3597 } 3598 l = gen_new_label(); 3599 t = tcg_temp_new_i32(); 3600 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3601 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3602 if (global) { 3603 gen_helper_check_tlb_flush_global(cpu_env); 3604 } else { 3605 gen_helper_check_tlb_flush_local(cpu_env); 3606 } 3607 gen_set_label(l); 3608 tcg_temp_free_i32(t); 3609 } 3610 #else 3611 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3612 #endif 3613 3614 /* isync */ 3615 static void gen_isync(DisasContext *ctx) 3616 { 3617 /* 3618 * We need to check for a pending TLB flush. This can only happen in 3619 * kernel mode however so check MSR_PR 3620 */ 3621 if (!ctx->pr) { 3622 gen_check_tlb_flush(ctx, false); 3623 } 3624 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3625 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 3626 } 3627 3628 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3629 3630 static void gen_load_locked(DisasContext *ctx, MemOp memop) 3631 { 3632 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3633 TCGv t0 = tcg_temp_new(); 3634 3635 gen_set_access_type(ctx, ACCESS_RES); 3636 gen_addr_reg_index(ctx, t0); 3637 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3638 tcg_gen_mov_tl(cpu_reserve, t0); 3639 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3640 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3641 tcg_temp_free(t0); 3642 } 3643 3644 #define LARX(name, memop) \ 3645 static void gen_##name(DisasContext *ctx) \ 3646 { \ 3647 gen_load_locked(ctx, memop); \ 3648 } 3649 3650 /* lwarx */ 3651 LARX(lbarx, DEF_MEMOP(MO_UB)) 3652 LARX(lharx, DEF_MEMOP(MO_UW)) 3653 LARX(lwarx, DEF_MEMOP(MO_UL)) 3654 3655 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop, 3656 TCGv EA, TCGCond cond, int addend) 3657 { 3658 TCGv t = tcg_temp_new(); 3659 TCGv t2 = tcg_temp_new(); 3660 TCGv u = tcg_temp_new(); 3661 3662 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3663 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3664 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3665 tcg_gen_addi_tl(u, t, addend); 3666 3667 /* E.g. for fetch and increment bounded... */ 3668 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3669 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3670 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3671 3672 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3673 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3674 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3675 3676 tcg_temp_free(t); 3677 tcg_temp_free(t2); 3678 tcg_temp_free(u); 3679 } 3680 3681 static void gen_ld_atomic(DisasContext *ctx, MemOp memop) 3682 { 3683 uint32_t gpr_FC = FC(ctx->opcode); 3684 TCGv EA = tcg_temp_new(); 3685 int rt = rD(ctx->opcode); 3686 bool need_serial; 3687 TCGv src, dst; 3688 3689 gen_addr_register(ctx, EA); 3690 dst = cpu_gpr[rt]; 3691 src = cpu_gpr[(rt + 1) & 31]; 3692 3693 need_serial = false; 3694 memop |= MO_ALIGN; 3695 switch (gpr_FC) { 3696 case 0: /* Fetch and add */ 3697 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3698 break; 3699 case 1: /* Fetch and xor */ 3700 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3701 break; 3702 case 2: /* Fetch and or */ 3703 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3704 break; 3705 case 3: /* Fetch and 'and' */ 3706 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3707 break; 3708 case 4: /* Fetch and max unsigned */ 3709 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3710 break; 3711 case 5: /* Fetch and max signed */ 3712 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3713 break; 3714 case 6: /* Fetch and min unsigned */ 3715 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3716 break; 3717 case 7: /* Fetch and min signed */ 3718 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3719 break; 3720 case 8: /* Swap */ 3721 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3722 break; 3723 3724 case 16: /* Compare and swap not equal */ 3725 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3726 need_serial = true; 3727 } else { 3728 TCGv t0 = tcg_temp_new(); 3729 TCGv t1 = tcg_temp_new(); 3730 3731 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3732 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3733 tcg_gen_mov_tl(t1, src); 3734 } else { 3735 tcg_gen_ext32u_tl(t1, src); 3736 } 3737 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3738 cpu_gpr[(rt + 2) & 31], t0); 3739 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3740 tcg_gen_mov_tl(dst, t0); 3741 3742 tcg_temp_free(t0); 3743 tcg_temp_free(t1); 3744 } 3745 break; 3746 3747 case 24: /* Fetch and increment bounded */ 3748 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3749 need_serial = true; 3750 } else { 3751 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3752 } 3753 break; 3754 case 25: /* Fetch and increment equal */ 3755 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3756 need_serial = true; 3757 } else { 3758 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3759 } 3760 break; 3761 case 28: /* Fetch and decrement bounded */ 3762 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3763 need_serial = true; 3764 } else { 3765 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3766 } 3767 break; 3768 3769 default: 3770 /* invoke data storage error handler */ 3771 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3772 } 3773 tcg_temp_free(EA); 3774 3775 if (need_serial) { 3776 /* Restart with exclusive lock. 
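 * (gen_helper_exit_atomic raises EXCP_ATOMIC, and the outer execution loop then replays this instruction with all other vCPUs stopped)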
*/ 3777 gen_helper_exit_atomic(cpu_env); 3778 ctx->base.is_jmp = DISAS_NORETURN; 3779 } 3780 } 3781 3782 static void gen_lwat(DisasContext *ctx) 3783 { 3784 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3785 } 3786 3787 #ifdef TARGET_PPC64 3788 static void gen_ldat(DisasContext *ctx) 3789 { 3790 gen_ld_atomic(ctx, DEF_MEMOP(MO_Q)); 3791 } 3792 #endif 3793 3794 static void gen_st_atomic(DisasContext *ctx, MemOp memop) 3795 { 3796 uint32_t gpr_FC = FC(ctx->opcode); 3797 TCGv EA = tcg_temp_new(); 3798 TCGv src, discard; 3799 3800 gen_addr_register(ctx, EA); 3801 src = cpu_gpr[rD(ctx->opcode)]; 3802 discard = tcg_temp_new(); 3803 3804 memop |= MO_ALIGN; 3805 switch (gpr_FC) { 3806 case 0: /* add and Store */ 3807 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3808 break; 3809 case 1: /* xor and Store */ 3810 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3811 break; 3812 case 2: /* Or and Store */ 3813 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3814 break; 3815 case 3: /* 'and' and Store */ 3816 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3817 break; 3818 case 4: /* Store max unsigned */ 3819 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3820 break; 3821 case 5: /* Store max signed */ 3822 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3823 break; 3824 case 6: /* Store min unsigned */ 3825 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3826 break; 3827 case 7: /* Store min signed */ 3828 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3829 break; 3830 case 24: /* Store twin */ 3831 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3832 /* Restart with exclusive lock. */ 3833 gen_helper_exit_atomic(cpu_env); 3834 ctx->base.is_jmp = DISAS_NORETURN; 3835 } else { 3836 TCGv t = tcg_temp_new(); 3837 TCGv t2 = tcg_temp_new(); 3838 TCGv s = tcg_temp_new(); 3839 TCGv s2 = tcg_temp_new(); 3840 TCGv ea_plus_s = tcg_temp_new(); 3841 3842 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3843 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3844 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3845 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3846 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3847 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3848 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3849 3850 tcg_temp_free(ea_plus_s); 3851 tcg_temp_free(s2); 3852 tcg_temp_free(s); 3853 tcg_temp_free(t2); 3854 tcg_temp_free(t); 3855 } 3856 break; 3857 default: 3858 /* invoke data storage error handler */ 3859 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3860 } 3861 tcg_temp_free(discard); 3862 tcg_temp_free(EA); 3863 } 3864 3865 static void gen_stwat(DisasContext *ctx) 3866 { 3867 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3868 } 3869 3870 #ifdef TARGET_PPC64 3871 static void gen_stdat(DisasContext *ctx) 3872 { 3873 gen_st_atomic(ctx, DEF_MEMOP(MO_Q)); 3874 } 3875 #endif 3876 3877 static void gen_conditional_store(DisasContext *ctx, MemOp memop) 3878 { 3879 TCGLabel *l1 = gen_new_label(); 3880 TCGLabel *l2 = gen_new_label(); 3881 TCGv t0 = tcg_temp_new(); 3882 int reg = rS(ctx->opcode); 3883 3884 gen_set_access_type(ctx, ACCESS_RES); 3885 gen_addr_reg_index(ctx, t0); 3886 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3887 tcg_temp_free(t0); 3888 3889 t0 = tcg_temp_new(); 3890 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3891 cpu_gpr[reg], ctx->mem_idx, 3892 DEF_MEMOP(memop) | 
MO_ALIGN); 3893 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3894 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3895 tcg_gen_or_tl(t0, t0, cpu_so); 3896 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3897 tcg_temp_free(t0); 3898 tcg_gen_br(l2); 3899 3900 gen_set_label(l1); 3901 3902 /* 3903 * Address mismatch implies failure. But we still need to provide 3904 * the memory barrier semantics of the instruction. 3905 */ 3906 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3907 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3908 3909 gen_set_label(l2); 3910 tcg_gen_movi_tl(cpu_reserve, -1); 3911 } 3912 3913 #define STCX(name, memop) \ 3914 static void gen_##name(DisasContext *ctx) \ 3915 { \ 3916 gen_conditional_store(ctx, memop); \ 3917 } 3918 3919 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3920 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3921 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3922 3923 #if defined(TARGET_PPC64) 3924 /* ldarx */ 3925 LARX(ldarx, DEF_MEMOP(MO_Q)) 3926 /* stdcx. */ 3927 STCX(stdcx_, DEF_MEMOP(MO_Q)) 3928 3929 /* lqarx */ 3930 static void gen_lqarx(DisasContext *ctx) 3931 { 3932 int rd = rD(ctx->opcode); 3933 TCGv EA, hi, lo; 3934 3935 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3936 (rd == rB(ctx->opcode)))) { 3937 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3938 return; 3939 } 3940 3941 gen_set_access_type(ctx, ACCESS_RES); 3942 EA = tcg_temp_new(); 3943 gen_addr_reg_index(ctx, EA); 3944 3945 /* Note that the low part is always in RD+1, even in LE mode. */ 3946 lo = cpu_gpr[rd + 1]; 3947 hi = cpu_gpr[rd]; 3948 3949 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3950 if (HAVE_ATOMIC128) { 3951 TCGv_i32 oi = tcg_temp_new_i32(); 3952 if (ctx->le_mode) { 3953 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN, 3954 ctx->mem_idx)); 3955 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3956 } else { 3957 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN, 3958 ctx->mem_idx)); 3959 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3960 } 3961 tcg_temp_free_i32(oi); 3962 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3963 } else { 3964 /* Restart with exclusive lock. */ 3965 gen_helper_exit_atomic(cpu_env); 3966 ctx->base.is_jmp = DISAS_NORETURN; 3967 tcg_temp_free(EA); 3968 return; 3969 } 3970 } else if (ctx->le_mode) { 3971 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEQ | MO_ALIGN_16); 3972 tcg_gen_mov_tl(cpu_reserve, EA); 3973 gen_addr_add(ctx, EA, EA, 8); 3974 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEQ); 3975 } else { 3976 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEQ | MO_ALIGN_16); 3977 tcg_gen_mov_tl(cpu_reserve, EA); 3978 gen_addr_add(ctx, EA, EA, 8); 3979 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEQ); 3980 } 3981 tcg_temp_free(EA); 3982 3983 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3984 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3985 } 3986 3987 /* stqcx. */ 3988 static void gen_stqcx_(DisasContext *ctx) 3989 { 3990 int rs = rS(ctx->opcode); 3991 TCGv EA, hi, lo; 3992 3993 if (unlikely(rs & 1)) { 3994 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3995 return; 3996 } 3997 3998 gen_set_access_type(ctx, ACCESS_RES); 3999 EA = tcg_temp_new(); 4000 gen_addr_reg_index(ctx, EA); 4001 4002 /* Note that the low part is always in RS+1, even in LE mode. 
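 * The register pair is architecturally RS (high doubleword of the value) and RS+1 (low); MSR[LE] only changes which memory doubleword each half corresponds to, which is why le_mode selects between reserve_val/reserve_val2 and swaps hi/lo in the stores below.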
 */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and on any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode, so check
     * MSR_PR as well.
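     *
     * (For example, a 64-bit guest kernel's ptesync issued after a batch
     * of tlbie instructions is the point where the deferred flush gets
     * performed.)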
4077 */ 4078 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 4079 gen_check_tlb_flush(ctx, true); 4080 } 4081 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 4082 } 4083 4084 /* wait */ 4085 static void gen_wait(DisasContext *ctx) 4086 { 4087 TCGv_i32 t0 = tcg_const_i32(1); 4088 tcg_gen_st_i32(t0, cpu_env, 4089 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 4090 tcg_temp_free_i32(t0); 4091 /* Stop translation, as the CPU is supposed to sleep from now */ 4092 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4093 } 4094 4095 #if defined(TARGET_PPC64) 4096 static void gen_doze(DisasContext *ctx) 4097 { 4098 #if defined(CONFIG_USER_ONLY) 4099 GEN_PRIV; 4100 #else 4101 TCGv_i32 t; 4102 4103 CHK_HV; 4104 t = tcg_const_i32(PPC_PM_DOZE); 4105 gen_helper_pminsn(cpu_env, t); 4106 tcg_temp_free_i32(t); 4107 /* Stop translation, as the CPU is supposed to sleep from now */ 4108 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4109 #endif /* defined(CONFIG_USER_ONLY) */ 4110 } 4111 4112 static void gen_nap(DisasContext *ctx) 4113 { 4114 #if defined(CONFIG_USER_ONLY) 4115 GEN_PRIV; 4116 #else 4117 TCGv_i32 t; 4118 4119 CHK_HV; 4120 t = tcg_const_i32(PPC_PM_NAP); 4121 gen_helper_pminsn(cpu_env, t); 4122 tcg_temp_free_i32(t); 4123 /* Stop translation, as the CPU is supposed to sleep from now */ 4124 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4125 #endif /* defined(CONFIG_USER_ONLY) */ 4126 } 4127 4128 static void gen_stop(DisasContext *ctx) 4129 { 4130 #if defined(CONFIG_USER_ONLY) 4131 GEN_PRIV; 4132 #else 4133 TCGv_i32 t; 4134 4135 CHK_HV; 4136 t = tcg_const_i32(PPC_PM_STOP); 4137 gen_helper_pminsn(cpu_env, t); 4138 tcg_temp_free_i32(t); 4139 /* Stop translation, as the CPU is supposed to sleep from now */ 4140 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4141 #endif /* defined(CONFIG_USER_ONLY) */ 4142 } 4143 4144 static void gen_sleep(DisasContext *ctx) 4145 { 4146 #if defined(CONFIG_USER_ONLY) 4147 GEN_PRIV; 4148 #else 4149 TCGv_i32 t; 4150 4151 CHK_HV; 4152 t = tcg_const_i32(PPC_PM_SLEEP); 4153 gen_helper_pminsn(cpu_env, t); 4154 tcg_temp_free_i32(t); 4155 /* Stop translation, as the CPU is supposed to sleep from now */ 4156 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4157 #endif /* defined(CONFIG_USER_ONLY) */ 4158 } 4159 4160 static void gen_rvwinkle(DisasContext *ctx) 4161 { 4162 #if defined(CONFIG_USER_ONLY) 4163 GEN_PRIV; 4164 #else 4165 TCGv_i32 t; 4166 4167 CHK_HV; 4168 t = tcg_const_i32(PPC_PM_RVWINKLE); 4169 gen_helper_pminsn(cpu_env, t); 4170 tcg_temp_free_i32(t); 4171 /* Stop translation, as the CPU is supposed to sleep from now */ 4172 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4173 #endif /* defined(CONFIG_USER_ONLY) */ 4174 } 4175 #endif /* #if defined(TARGET_PPC64) */ 4176 4177 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 4178 { 4179 #if defined(TARGET_PPC64) 4180 if (ctx->has_cfar) { 4181 tcg_gen_movi_tl(cpu_cfar, nip); 4182 } 4183 #endif 4184 } 4185 4186 #if defined(TARGET_PPC64) 4187 static void pmu_count_insns(DisasContext *ctx) 4188 { 4189 /* 4190 * Do not bother calling the helper if the PMU isn't counting 4191 * instructions. 4192 */ 4193 if (!ctx->pmu_insn_cnt) { 4194 return; 4195 } 4196 4197 #if !defined(CONFIG_USER_ONLY) 4198 /* 4199 * The PMU insns_inc() helper stops the internal PMU timer if a 4200 * counter overflows happens. In that case, if the guest is 4201 * running with icount and we do not handle it beforehand, 4202 * the helper can trigger a 'bad icount read'. 
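     *
     * Calling gen_icount_io_start() before the helper lets the icount
     * machinery account for this access safely.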
4203 */ 4204 gen_icount_io_start(ctx); 4205 4206 gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns)); 4207 #else 4208 /* 4209 * User mode can read (but not write) PMC5 and start/stop 4210 * the PMU via MMCR0_FC. In this case just increment 4211 * PMC5 with base.num_insns. 4212 */ 4213 TCGv t0 = tcg_temp_new(); 4214 4215 gen_load_spr(t0, SPR_POWER_PMC5); 4216 tcg_gen_addi_tl(t0, t0, ctx->base.num_insns); 4217 gen_store_spr(SPR_POWER_PMC5, t0); 4218 4219 tcg_temp_free(t0); 4220 #endif /* #if !defined(CONFIG_USER_ONLY) */ 4221 } 4222 #else 4223 static void pmu_count_insns(DisasContext *ctx) 4224 { 4225 return; 4226 } 4227 #endif /* #if defined(TARGET_PPC64) */ 4228 4229 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 4230 { 4231 return translator_use_goto_tb(&ctx->base, dest); 4232 } 4233 4234 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 4235 { 4236 if (unlikely(ctx->singlestep_enabled)) { 4237 gen_debug_exception(ctx); 4238 } else { 4239 /* 4240 * tcg_gen_lookup_and_goto_ptr will exit the TB if 4241 * CF_NO_GOTO_PTR is set. Count insns now. 4242 */ 4243 if (ctx->base.tb->flags & CF_NO_GOTO_PTR) { 4244 pmu_count_insns(ctx); 4245 } 4246 4247 tcg_gen_lookup_and_goto_ptr(); 4248 } 4249 } 4250 4251 /*** Branch ***/ 4252 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 4253 { 4254 if (NARROW_MODE(ctx)) { 4255 dest = (uint32_t) dest; 4256 } 4257 if (use_goto_tb(ctx, dest)) { 4258 pmu_count_insns(ctx); 4259 tcg_gen_goto_tb(n); 4260 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4261 tcg_gen_exit_tb(ctx->base.tb, n); 4262 } else { 4263 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4264 gen_lookup_and_goto_ptr(ctx); 4265 } 4266 } 4267 4268 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 4269 { 4270 if (NARROW_MODE(ctx)) { 4271 nip = (uint32_t)nip; 4272 } 4273 tcg_gen_movi_tl(cpu_lr, nip); 4274 } 4275 4276 /* b ba bl bla */ 4277 static void gen_b(DisasContext *ctx) 4278 { 4279 target_ulong li, target; 4280 4281 /* sign extend LI */ 4282 li = LI(ctx->opcode); 4283 li = (li ^ 0x02000000) - 0x02000000; 4284 if (likely(AA(ctx->opcode) == 0)) { 4285 target = ctx->cia + li; 4286 } else { 4287 target = li; 4288 } 4289 if (LK(ctx->opcode)) { 4290 gen_setlr(ctx, ctx->base.pc_next); 4291 } 4292 gen_update_cfar(ctx, ctx->cia); 4293 gen_goto_tb(ctx, 0, target); 4294 ctx->base.is_jmp = DISAS_NORETURN; 4295 } 4296 4297 #define BCOND_IM 0 4298 #define BCOND_LR 1 4299 #define BCOND_CTR 2 4300 #define BCOND_TAR 3 4301 4302 static void gen_bcond(DisasContext *ctx, int type) 4303 { 4304 uint32_t bo = BO(ctx->opcode); 4305 TCGLabel *l1; 4306 TCGv target; 4307 4308 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 4309 target = tcg_temp_local_new(); 4310 if (type == BCOND_CTR) { 4311 tcg_gen_mov_tl(target, cpu_ctr); 4312 } else if (type == BCOND_TAR) { 4313 gen_load_spr(target, SPR_TAR); 4314 } else { 4315 tcg_gen_mov_tl(target, cpu_lr); 4316 } 4317 } else { 4318 target = NULL; 4319 } 4320 if (LK(ctx->opcode)) { 4321 gen_setlr(ctx, ctx->base.pc_next); 4322 } 4323 l1 = gen_new_label(); 4324 if ((bo & 0x4) == 0) { 4325 /* Decrement and test CTR */ 4326 TCGv temp = tcg_temp_new(); 4327 4328 if (type == BCOND_CTR) { 4329 /* 4330 * All ISAs up to v3 describe this form of bcctr as invalid but 4331 * some processors, ie. 64-bit server processors compliant with 4332 * arch 2.x, do implement a "test and decrement" logic instead, 4333 * as described in their respective UMs. 
This logic involves CTR 4334 * to act as both the branch target and a counter, which makes 4335 * it basically useless and thus never used in real code. 4336 * 4337 * This form was hence chosen to trigger extra micro-architectural 4338 * side-effect on real HW needed for the Spectre v2 workaround. 4339 * It is up to guests that implement such workaround, ie. linux, to 4340 * use this form in a way it just triggers the side-effect without 4341 * doing anything else harmful. 4342 */ 4343 if (unlikely(!is_book3s_arch2x(ctx))) { 4344 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4345 tcg_temp_free(temp); 4346 tcg_temp_free(target); 4347 return; 4348 } 4349 4350 if (NARROW_MODE(ctx)) { 4351 tcg_gen_ext32u_tl(temp, cpu_ctr); 4352 } else { 4353 tcg_gen_mov_tl(temp, cpu_ctr); 4354 } 4355 if (bo & 0x2) { 4356 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4357 } else { 4358 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4359 } 4360 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4361 } else { 4362 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4363 if (NARROW_MODE(ctx)) { 4364 tcg_gen_ext32u_tl(temp, cpu_ctr); 4365 } else { 4366 tcg_gen_mov_tl(temp, cpu_ctr); 4367 } 4368 if (bo & 0x2) { 4369 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4370 } else { 4371 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4372 } 4373 } 4374 tcg_temp_free(temp); 4375 } 4376 if ((bo & 0x10) == 0) { 4377 /* Test CR */ 4378 uint32_t bi = BI(ctx->opcode); 4379 uint32_t mask = 0x08 >> (bi & 0x03); 4380 TCGv_i32 temp = tcg_temp_new_i32(); 4381 4382 if (bo & 0x8) { 4383 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4384 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 4385 } else { 4386 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4387 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 4388 } 4389 tcg_temp_free_i32(temp); 4390 } 4391 gen_update_cfar(ctx, ctx->cia); 4392 if (type == BCOND_IM) { 4393 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 4394 if (likely(AA(ctx->opcode) == 0)) { 4395 gen_goto_tb(ctx, 0, ctx->cia + li); 4396 } else { 4397 gen_goto_tb(ctx, 0, li); 4398 } 4399 } else { 4400 if (NARROW_MODE(ctx)) { 4401 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 4402 } else { 4403 tcg_gen_andi_tl(cpu_nip, target, ~3); 4404 } 4405 gen_lookup_and_goto_ptr(ctx); 4406 tcg_temp_free(target); 4407 } 4408 if ((bo & 0x14) != 0x14) { 4409 /* fallthrough case */ 4410 gen_set_label(l1); 4411 gen_goto_tb(ctx, 1, ctx->base.pc_next); 4412 } 4413 ctx->base.is_jmp = DISAS_NORETURN; 4414 } 4415 4416 static void gen_bc(DisasContext *ctx) 4417 { 4418 gen_bcond(ctx, BCOND_IM); 4419 } 4420 4421 static void gen_bcctr(DisasContext *ctx) 4422 { 4423 gen_bcond(ctx, BCOND_CTR); 4424 } 4425 4426 static void gen_bclr(DisasContext *ctx) 4427 { 4428 gen_bcond(ctx, BCOND_LR); 4429 } 4430 4431 static void gen_bctar(DisasContext *ctx) 4432 { 4433 gen_bcond(ctx, BCOND_TAR); 4434 } 4435 4436 /*** Condition register logical ***/ 4437 #define GEN_CRLOGIC(name, tcg_op, opc) \ 4438 static void glue(gen_, name)(DisasContext *ctx) \ 4439 { \ 4440 uint8_t bitmask; \ 4441 int sh; \ 4442 TCGv_i32 t0, t1; \ 4443 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 4444 t0 = tcg_temp_new_i32(); \ 4445 if (sh > 0) \ 4446 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 4447 else if (sh < 0) \ 4448 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 4449 else \ 4450 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 4451 t1 = tcg_temp_new_i32(); \ 4452 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 4453 if (sh > 0) 
\ 4454 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 4455 else if (sh < 0) \ 4456 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 4457 else \ 4458 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 4459 tcg_op(t0, t0, t1); \ 4460 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 4461 tcg_gen_andi_i32(t0, t0, bitmask); \ 4462 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 4463 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 4464 tcg_temp_free_i32(t0); \ 4465 tcg_temp_free_i32(t1); \ 4466 } 4467 4468 /* crand */ 4469 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 4470 /* crandc */ 4471 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 4472 /* creqv */ 4473 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 4474 /* crnand */ 4475 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 4476 /* crnor */ 4477 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 4478 /* cror */ 4479 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 4480 /* crorc */ 4481 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 4482 /* crxor */ 4483 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 4484 4485 /* mcrf */ 4486 static void gen_mcrf(DisasContext *ctx) 4487 { 4488 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 4489 } 4490 4491 /*** System linkage ***/ 4492 4493 /* rfi (supervisor only) */ 4494 static void gen_rfi(DisasContext *ctx) 4495 { 4496 #if defined(CONFIG_USER_ONLY) 4497 GEN_PRIV; 4498 #else 4499 /* 4500 * This instruction doesn't exist anymore on 64-bit server 4501 * processors compliant with arch 2.x 4502 */ 4503 if (is_book3s_arch2x(ctx)) { 4504 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4505 return; 4506 } 4507 /* Restore CPU state */ 4508 CHK_SV; 4509 gen_icount_io_start(ctx); 4510 gen_update_cfar(ctx, ctx->cia); 4511 gen_helper_rfi(cpu_env); 4512 ctx->base.is_jmp = DISAS_EXIT; 4513 #endif 4514 } 4515 4516 #if defined(TARGET_PPC64) 4517 static void gen_rfid(DisasContext *ctx) 4518 { 4519 #if defined(CONFIG_USER_ONLY) 4520 GEN_PRIV; 4521 #else 4522 /* Restore CPU state */ 4523 CHK_SV; 4524 gen_icount_io_start(ctx); 4525 gen_update_cfar(ctx, ctx->cia); 4526 gen_helper_rfid(cpu_env); 4527 ctx->base.is_jmp = DISAS_EXIT; 4528 #endif 4529 } 4530 4531 #if !defined(CONFIG_USER_ONLY) 4532 static void gen_rfscv(DisasContext *ctx) 4533 { 4534 #if defined(CONFIG_USER_ONLY) 4535 GEN_PRIV; 4536 #else 4537 /* Restore CPU state */ 4538 CHK_SV; 4539 gen_icount_io_start(ctx); 4540 gen_update_cfar(ctx, ctx->cia); 4541 gen_helper_rfscv(cpu_env); 4542 ctx->base.is_jmp = DISAS_EXIT; 4543 #endif 4544 } 4545 #endif 4546 4547 static void gen_hrfid(DisasContext *ctx) 4548 { 4549 #if defined(CONFIG_USER_ONLY) 4550 GEN_PRIV; 4551 #else 4552 /* Restore CPU state */ 4553 CHK_HV; 4554 gen_helper_hrfid(cpu_env); 4555 ctx->base.is_jmp = DISAS_EXIT; 4556 #endif 4557 } 4558 #endif 4559 4560 /* sc */ 4561 #if defined(CONFIG_USER_ONLY) 4562 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 4563 #else 4564 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 4565 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED 4566 #endif 4567 static void gen_sc(DisasContext *ctx) 4568 { 4569 uint32_t lev; 4570 4571 lev = (ctx->opcode >> 5) & 0x7F; 4572 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 4573 } 4574 4575 #if defined(TARGET_PPC64) 4576 #if !defined(CONFIG_USER_ONLY) 4577 static void gen_scv(DisasContext *ctx) 4578 { 4579 uint32_t lev = (ctx->opcode >> 5) & 0x7F; 4580 4581 /* Set the PC back to the faulting instruction. 
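 * The scv helper raises the system-call-vectored interrupt itself, so NIP
 * must point at the scv instruction when the helper runs.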
*/ 4582 gen_update_nip(ctx, ctx->cia); 4583 gen_helper_scv(cpu_env, tcg_constant_i32(lev)); 4584 4585 ctx->base.is_jmp = DISAS_NORETURN; 4586 } 4587 #endif 4588 #endif 4589 4590 /*** Trap ***/ 4591 4592 /* Check for unconditional traps (always or never) */ 4593 static bool check_unconditional_trap(DisasContext *ctx) 4594 { 4595 /* Trap never */ 4596 if (TO(ctx->opcode) == 0) { 4597 return true; 4598 } 4599 /* Trap always */ 4600 if (TO(ctx->opcode) == 31) { 4601 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 4602 return true; 4603 } 4604 return false; 4605 } 4606 4607 /* tw */ 4608 static void gen_tw(DisasContext *ctx) 4609 { 4610 TCGv_i32 t0; 4611 4612 if (check_unconditional_trap(ctx)) { 4613 return; 4614 } 4615 t0 = tcg_const_i32(TO(ctx->opcode)); 4616 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4617 t0); 4618 tcg_temp_free_i32(t0); 4619 } 4620 4621 /* twi */ 4622 static void gen_twi(DisasContext *ctx) 4623 { 4624 TCGv t0; 4625 TCGv_i32 t1; 4626 4627 if (check_unconditional_trap(ctx)) { 4628 return; 4629 } 4630 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4631 t1 = tcg_const_i32(TO(ctx->opcode)); 4632 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4633 tcg_temp_free(t0); 4634 tcg_temp_free_i32(t1); 4635 } 4636 4637 #if defined(TARGET_PPC64) 4638 /* td */ 4639 static void gen_td(DisasContext *ctx) 4640 { 4641 TCGv_i32 t0; 4642 4643 if (check_unconditional_trap(ctx)) { 4644 return; 4645 } 4646 t0 = tcg_const_i32(TO(ctx->opcode)); 4647 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4648 t0); 4649 tcg_temp_free_i32(t0); 4650 } 4651 4652 /* tdi */ 4653 static void gen_tdi(DisasContext *ctx) 4654 { 4655 TCGv t0; 4656 TCGv_i32 t1; 4657 4658 if (check_unconditional_trap(ctx)) { 4659 return; 4660 } 4661 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4662 t1 = tcg_const_i32(TO(ctx->opcode)); 4663 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4664 tcg_temp_free(t0); 4665 tcg_temp_free_i32(t1); 4666 } 4667 #endif 4668 4669 /*** Processor control ***/ 4670 4671 /* mcrxr */ 4672 static void gen_mcrxr(DisasContext *ctx) 4673 { 4674 TCGv_i32 t0 = tcg_temp_new_i32(); 4675 TCGv_i32 t1 = tcg_temp_new_i32(); 4676 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4677 4678 tcg_gen_trunc_tl_i32(t0, cpu_so); 4679 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4680 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4681 tcg_gen_shli_i32(t0, t0, 3); 4682 tcg_gen_shli_i32(t1, t1, 2); 4683 tcg_gen_shli_i32(dst, dst, 1); 4684 tcg_gen_or_i32(dst, dst, t0); 4685 tcg_gen_or_i32(dst, dst, t1); 4686 tcg_temp_free_i32(t0); 4687 tcg_temp_free_i32(t1); 4688 4689 tcg_gen_movi_tl(cpu_so, 0); 4690 tcg_gen_movi_tl(cpu_ov, 0); 4691 tcg_gen_movi_tl(cpu_ca, 0); 4692 } 4693 4694 #ifdef TARGET_PPC64 4695 /* mcrxrx */ 4696 static void gen_mcrxrx(DisasContext *ctx) 4697 { 4698 TCGv t0 = tcg_temp_new(); 4699 TCGv t1 = tcg_temp_new(); 4700 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4701 4702 /* copy OV and OV32 */ 4703 tcg_gen_shli_tl(t0, cpu_ov, 1); 4704 tcg_gen_or_tl(t0, t0, cpu_ov32); 4705 tcg_gen_shli_tl(t0, t0, 2); 4706 /* copy CA and CA32 */ 4707 tcg_gen_shli_tl(t1, cpu_ca, 1); 4708 tcg_gen_or_tl(t1, t1, cpu_ca32); 4709 tcg_gen_or_tl(t0, t0, t1); 4710 tcg_gen_trunc_tl_i32(dst, t0); 4711 tcg_temp_free(t0); 4712 tcg_temp_free(t1); 4713 } 4714 #endif 4715 4716 /* mfcr mfocrf */ 4717 static void gen_mfcr(DisasContext *ctx) 4718 { 4719 uint32_t crm, crn; 4720 4721 if (likely(ctx->opcode & 0x00100000)) { 4722 crm = CRM(ctx->opcode); 4723 if (likely(crm && ((crm & (crm - 1)) == 0))) { 
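            /* A single bit set in CRM selects one CR field: the mfocrf form. */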
4724 crn = ctz32(crm); 4725 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4726 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4727 cpu_gpr[rD(ctx->opcode)], crn * 4); 4728 } 4729 } else { 4730 TCGv_i32 t0 = tcg_temp_new_i32(); 4731 tcg_gen_mov_i32(t0, cpu_crf[0]); 4732 tcg_gen_shli_i32(t0, t0, 4); 4733 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4734 tcg_gen_shli_i32(t0, t0, 4); 4735 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4736 tcg_gen_shli_i32(t0, t0, 4); 4737 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4738 tcg_gen_shli_i32(t0, t0, 4); 4739 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4740 tcg_gen_shli_i32(t0, t0, 4); 4741 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4742 tcg_gen_shli_i32(t0, t0, 4); 4743 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4744 tcg_gen_shli_i32(t0, t0, 4); 4745 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4746 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4747 tcg_temp_free_i32(t0); 4748 } 4749 } 4750 4751 /* mfmsr */ 4752 static void gen_mfmsr(DisasContext *ctx) 4753 { 4754 CHK_SV; 4755 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4756 } 4757 4758 /* mfspr */ 4759 static inline void gen_op_mfspr(DisasContext *ctx) 4760 { 4761 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4762 uint32_t sprn = SPR(ctx->opcode); 4763 4764 #if defined(CONFIG_USER_ONLY) 4765 read_cb = ctx->spr_cb[sprn].uea_read; 4766 #else 4767 if (ctx->pr) { 4768 read_cb = ctx->spr_cb[sprn].uea_read; 4769 } else if (ctx->hv) { 4770 read_cb = ctx->spr_cb[sprn].hea_read; 4771 } else { 4772 read_cb = ctx->spr_cb[sprn].oea_read; 4773 } 4774 #endif 4775 if (likely(read_cb != NULL)) { 4776 if (likely(read_cb != SPR_NOACCESS)) { 4777 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4778 } else { 4779 /* Privilege exception */ 4780 /* 4781 * This is a hack to avoid warnings when running Linux: 4782 * this OS breaks the PowerPC virtualisation model, 4783 * allowing userland application to read the PVR 4784 */ 4785 if (sprn != SPR_PVR) { 4786 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4787 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4788 ctx->cia); 4789 } 4790 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4791 } 4792 } else { 4793 /* ISA 2.07 defines these as no-ops */ 4794 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4795 (sprn >= 808 && sprn <= 811)) { 4796 /* This is a nop */ 4797 return; 4798 } 4799 /* Not defined */ 4800 qemu_log_mask(LOG_GUEST_ERROR, 4801 "Trying to read invalid spr %d (0x%03x) at " 4802 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4803 4804 /* 4805 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4806 * generate a priv, a hv emu or a no-op 4807 */ 4808 if (sprn & 0x10) { 4809 if (ctx->pr) { 4810 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4811 } 4812 } else { 4813 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4814 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4815 } 4816 } 4817 } 4818 } 4819 4820 static void gen_mfspr(DisasContext *ctx) 4821 { 4822 gen_op_mfspr(ctx); 4823 } 4824 4825 /* mftb */ 4826 static void gen_mftb(DisasContext *ctx) 4827 { 4828 gen_op_mfspr(ctx); 4829 } 4830 4831 /* mtcrf mtocrf*/ 4832 static void gen_mtcrf(DisasContext *ctx) 4833 { 4834 uint32_t crm, crn; 4835 4836 crm = CRM(ctx->opcode); 4837 if (likely((ctx->opcode & 0x00100000))) { 4838 if (crm && ((crm & (crm - 1)) == 0)) { 4839 TCGv_i32 temp = tcg_temp_new_i32(); 4840 crn = ctz32(crm); 4841 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4842 tcg_gen_shri_i32(temp, temp, crn * 4); 4843 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4844 tcg_temp_free_i32(temp); 
4845 } 4846 } else { 4847 TCGv_i32 temp = tcg_temp_new_i32(); 4848 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4849 for (crn = 0 ; crn < 8 ; crn++) { 4850 if (crm & (1 << crn)) { 4851 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4852 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4853 } 4854 } 4855 tcg_temp_free_i32(temp); 4856 } 4857 } 4858 4859 /* mtmsr */ 4860 #if defined(TARGET_PPC64) 4861 static void gen_mtmsrd(DisasContext *ctx) 4862 { 4863 if (unlikely(!is_book3s_arch2x(ctx))) { 4864 gen_invalid(ctx); 4865 return; 4866 } 4867 4868 CHK_SV; 4869 4870 #if !defined(CONFIG_USER_ONLY) 4871 TCGv t0, t1; 4872 target_ulong mask; 4873 4874 t0 = tcg_temp_new(); 4875 t1 = tcg_temp_new(); 4876 4877 gen_icount_io_start(ctx); 4878 4879 if (ctx->opcode & 0x00010000) { 4880 /* L=1 form only updates EE and RI */ 4881 mask = (1ULL << MSR_RI) | (1ULL << MSR_EE); 4882 } else { 4883 /* mtmsrd does not alter HV, S, ME, or LE */ 4884 mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) | 4885 (1ULL << MSR_HV)); 4886 /* 4887 * XXX: we need to update nip before the store if we enter 4888 * power saving mode, we will exit the loop directly from 4889 * ppc_store_msr 4890 */ 4891 gen_update_nip(ctx, ctx->base.pc_next); 4892 } 4893 4894 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 4895 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 4896 tcg_gen_or_tl(t0, t0, t1); 4897 4898 gen_helper_store_msr(cpu_env, t0); 4899 4900 /* Must stop the translation as machine state (may have) changed */ 4901 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4902 4903 tcg_temp_free(t0); 4904 tcg_temp_free(t1); 4905 #endif /* !defined(CONFIG_USER_ONLY) */ 4906 } 4907 #endif /* defined(TARGET_PPC64) */ 4908 4909 static void gen_mtmsr(DisasContext *ctx) 4910 { 4911 CHK_SV; 4912 4913 #if !defined(CONFIG_USER_ONLY) 4914 TCGv t0, t1; 4915 target_ulong mask = 0xFFFFFFFF; 4916 4917 t0 = tcg_temp_new(); 4918 t1 = tcg_temp_new(); 4919 4920 gen_icount_io_start(ctx); 4921 if (ctx->opcode & 0x00010000) { 4922 /* L=1 form only updates EE and RI */ 4923 mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE); 4924 } else { 4925 /* mtmsr does not alter S, ME, or LE */ 4926 mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S)); 4927 4928 /* 4929 * XXX: we need to update nip before the store if we enter 4930 * power saving mode, we will exit the loop directly from 4931 * ppc_store_msr 4932 */ 4933 gen_update_nip(ctx, ctx->base.pc_next); 4934 } 4935 4936 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 4937 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 4938 tcg_gen_or_tl(t0, t0, t1); 4939 4940 gen_helper_store_msr(cpu_env, t0); 4941 4942 /* Must stop the translation as machine state (may have) changed */ 4943 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4944 4945 tcg_temp_free(t0); 4946 tcg_temp_free(t1); 4947 #endif 4948 } 4949 4950 /* mtspr */ 4951 static void gen_mtspr(DisasContext *ctx) 4952 { 4953 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4954 uint32_t sprn = SPR(ctx->opcode); 4955 4956 #if defined(CONFIG_USER_ONLY) 4957 write_cb = ctx->spr_cb[sprn].uea_write; 4958 #else 4959 if (ctx->pr) { 4960 write_cb = ctx->spr_cb[sprn].uea_write; 4961 } else if (ctx->hv) { 4962 write_cb = ctx->spr_cb[sprn].hea_write; 4963 } else { 4964 write_cb = ctx->spr_cb[sprn].oea_write; 4965 } 4966 #endif 4967 if (likely(write_cb != NULL)) { 4968 if (likely(write_cb != SPR_NOACCESS)) { 4969 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4970 } else { 4971 /* Privilege exception */ 4972 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 
4973 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4974 ctx->cia); 4975 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4976 } 4977 } else { 4978 /* ISA 2.07 defines these as no-ops */ 4979 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4980 (sprn >= 808 && sprn <= 811)) { 4981 /* This is a nop */ 4982 return; 4983 } 4984 4985 /* Not defined */ 4986 qemu_log_mask(LOG_GUEST_ERROR, 4987 "Trying to write invalid spr %d (0x%03x) at " 4988 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4989 4990 4991 /* 4992 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4993 * generate a priv, a hv emu or a no-op 4994 */ 4995 if (sprn & 0x10) { 4996 if (ctx->pr) { 4997 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4998 } 4999 } else { 5000 if (ctx->pr || sprn == 0) { 5001 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5002 } 5003 } 5004 } 5005 } 5006 5007 #if defined(TARGET_PPC64) 5008 /* setb */ 5009 static void gen_setb(DisasContext *ctx) 5010 { 5011 TCGv_i32 t0 = tcg_temp_new_i32(); 5012 TCGv_i32 t8 = tcg_constant_i32(8); 5013 TCGv_i32 tm1 = tcg_constant_i32(-1); 5014 int crf = crfS(ctx->opcode); 5015 5016 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 5017 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 5018 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5019 5020 tcg_temp_free_i32(t0); 5021 } 5022 #endif 5023 5024 /*** Cache management ***/ 5025 5026 /* dcbf */ 5027 static void gen_dcbf(DisasContext *ctx) 5028 { 5029 /* XXX: specification says this is treated as a load by the MMU */ 5030 TCGv t0; 5031 gen_set_access_type(ctx, ACCESS_CACHE); 5032 t0 = tcg_temp_new(); 5033 gen_addr_reg_index(ctx, t0); 5034 gen_qemu_ld8u(ctx, t0, t0); 5035 tcg_temp_free(t0); 5036 } 5037 5038 /* dcbfep (external PID dcbf) */ 5039 static void gen_dcbfep(DisasContext *ctx) 5040 { 5041 /* XXX: specification says this is treated as a load by the MMU */ 5042 TCGv t0; 5043 CHK_SV; 5044 gen_set_access_type(ctx, ACCESS_CACHE); 5045 t0 = tcg_temp_new(); 5046 gen_addr_reg_index(ctx, t0); 5047 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5048 tcg_temp_free(t0); 5049 } 5050 5051 /* dcbi (Supervisor only) */ 5052 static void gen_dcbi(DisasContext *ctx) 5053 { 5054 #if defined(CONFIG_USER_ONLY) 5055 GEN_PRIV; 5056 #else 5057 TCGv EA, val; 5058 5059 CHK_SV; 5060 EA = tcg_temp_new(); 5061 gen_set_access_type(ctx, ACCESS_CACHE); 5062 gen_addr_reg_index(ctx, EA); 5063 val = tcg_temp_new(); 5064 /* XXX: specification says this should be treated as a store by the MMU */ 5065 gen_qemu_ld8u(ctx, val, EA); 5066 gen_qemu_st8(ctx, val, EA); 5067 tcg_temp_free(val); 5068 tcg_temp_free(EA); 5069 #endif /* defined(CONFIG_USER_ONLY) */ 5070 } 5071 5072 /* dcdst */ 5073 static void gen_dcbst(DisasContext *ctx) 5074 { 5075 /* XXX: specification say this is treated as a load by the MMU */ 5076 TCGv t0; 5077 gen_set_access_type(ctx, ACCESS_CACHE); 5078 t0 = tcg_temp_new(); 5079 gen_addr_reg_index(ctx, t0); 5080 gen_qemu_ld8u(ctx, t0, t0); 5081 tcg_temp_free(t0); 5082 } 5083 5084 /* dcbstep (dcbstep External PID version) */ 5085 static void gen_dcbstep(DisasContext *ctx) 5086 { 5087 /* XXX: specification say this is treated as a load by the MMU */ 5088 TCGv t0; 5089 gen_set_access_type(ctx, ACCESS_CACHE); 5090 t0 = tcg_temp_new(); 5091 gen_addr_reg_index(ctx, t0); 5092 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5093 tcg_temp_free(t0); 5094 } 5095 5096 /* dcbt */ 5097 static void gen_dcbt(DisasContext *ctx) 5098 { 5099 /* 5100 * interpreted as no-op 5101 * XXX: 
specification say this is treated as a load by the MMU but 5102 * does not generate any exception 5103 */ 5104 } 5105 5106 /* dcbtep */ 5107 static void gen_dcbtep(DisasContext *ctx) 5108 { 5109 /* 5110 * interpreted as no-op 5111 * XXX: specification say this is treated as a load by the MMU but 5112 * does not generate any exception 5113 */ 5114 } 5115 5116 /* dcbtst */ 5117 static void gen_dcbtst(DisasContext *ctx) 5118 { 5119 /* 5120 * interpreted as no-op 5121 * XXX: specification say this is treated as a load by the MMU but 5122 * does not generate any exception 5123 */ 5124 } 5125 5126 /* dcbtstep */ 5127 static void gen_dcbtstep(DisasContext *ctx) 5128 { 5129 /* 5130 * interpreted as no-op 5131 * XXX: specification say this is treated as a load by the MMU but 5132 * does not generate any exception 5133 */ 5134 } 5135 5136 /* dcbtls */ 5137 static void gen_dcbtls(DisasContext *ctx) 5138 { 5139 /* Always fails locking the cache */ 5140 TCGv t0 = tcg_temp_new(); 5141 gen_load_spr(t0, SPR_Exxx_L1CSR0); 5142 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 5143 gen_store_spr(SPR_Exxx_L1CSR0, t0); 5144 tcg_temp_free(t0); 5145 } 5146 5147 /* dcbz */ 5148 static void gen_dcbz(DisasContext *ctx) 5149 { 5150 TCGv tcgv_addr; 5151 TCGv_i32 tcgv_op; 5152 5153 gen_set_access_type(ctx, ACCESS_CACHE); 5154 tcgv_addr = tcg_temp_new(); 5155 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5156 gen_addr_reg_index(ctx, tcgv_addr); 5157 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 5158 tcg_temp_free(tcgv_addr); 5159 tcg_temp_free_i32(tcgv_op); 5160 } 5161 5162 /* dcbzep */ 5163 static void gen_dcbzep(DisasContext *ctx) 5164 { 5165 TCGv tcgv_addr; 5166 TCGv_i32 tcgv_op; 5167 5168 gen_set_access_type(ctx, ACCESS_CACHE); 5169 tcgv_addr = tcg_temp_new(); 5170 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5171 gen_addr_reg_index(ctx, tcgv_addr); 5172 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 5173 tcg_temp_free(tcgv_addr); 5174 tcg_temp_free_i32(tcgv_op); 5175 } 5176 5177 /* dst / dstt */ 5178 static void gen_dst(DisasContext *ctx) 5179 { 5180 if (rA(ctx->opcode) == 0) { 5181 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5182 } else { 5183 /* interpreted as no-op */ 5184 } 5185 } 5186 5187 /* dstst /dststt */ 5188 static void gen_dstst(DisasContext *ctx) 5189 { 5190 if (rA(ctx->opcode) == 0) { 5191 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5192 } else { 5193 /* interpreted as no-op */ 5194 } 5195 5196 } 5197 5198 /* dss / dssall */ 5199 static void gen_dss(DisasContext *ctx) 5200 { 5201 /* interpreted as no-op */ 5202 } 5203 5204 /* icbi */ 5205 static void gen_icbi(DisasContext *ctx) 5206 { 5207 TCGv t0; 5208 gen_set_access_type(ctx, ACCESS_CACHE); 5209 t0 = tcg_temp_new(); 5210 gen_addr_reg_index(ctx, t0); 5211 gen_helper_icbi(cpu_env, t0); 5212 tcg_temp_free(t0); 5213 } 5214 5215 /* icbiep */ 5216 static void gen_icbiep(DisasContext *ctx) 5217 { 5218 TCGv t0; 5219 gen_set_access_type(ctx, ACCESS_CACHE); 5220 t0 = tcg_temp_new(); 5221 gen_addr_reg_index(ctx, t0); 5222 gen_helper_icbiep(cpu_env, t0); 5223 tcg_temp_free(t0); 5224 } 5225 5226 /* Optional: */ 5227 /* dcba */ 5228 static void gen_dcba(DisasContext *ctx) 5229 { 5230 /* 5231 * interpreted as no-op 5232 * XXX: specification say this is treated as a store by the MMU 5233 * but does not generate any exception 5234 */ 5235 } 5236 5237 /*** Segment register manipulation ***/ 5238 /* Supervisor only: */ 5239 5240 /* mfsr */ 5241 static void gen_mfsr(DisasContext *ctx) 5242 { 5243 #if defined(CONFIG_USER_ONLY) 5244 GEN_PRIV; 5245 #else 
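    /* Copy the segment register selected by the SR field into rD. */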
5246 TCGv t0; 5247 5248 CHK_SV; 5249 t0 = tcg_const_tl(SR(ctx->opcode)); 5250 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5251 tcg_temp_free(t0); 5252 #endif /* defined(CONFIG_USER_ONLY) */ 5253 } 5254 5255 /* mfsrin */ 5256 static void gen_mfsrin(DisasContext *ctx) 5257 { 5258 #if defined(CONFIG_USER_ONLY) 5259 GEN_PRIV; 5260 #else 5261 TCGv t0; 5262 5263 CHK_SV; 5264 t0 = tcg_temp_new(); 5265 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5266 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5267 tcg_temp_free(t0); 5268 #endif /* defined(CONFIG_USER_ONLY) */ 5269 } 5270 5271 /* mtsr */ 5272 static void gen_mtsr(DisasContext *ctx) 5273 { 5274 #if defined(CONFIG_USER_ONLY) 5275 GEN_PRIV; 5276 #else 5277 TCGv t0; 5278 5279 CHK_SV; 5280 t0 = tcg_const_tl(SR(ctx->opcode)); 5281 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5282 tcg_temp_free(t0); 5283 #endif /* defined(CONFIG_USER_ONLY) */ 5284 } 5285 5286 /* mtsrin */ 5287 static void gen_mtsrin(DisasContext *ctx) 5288 { 5289 #if defined(CONFIG_USER_ONLY) 5290 GEN_PRIV; 5291 #else 5292 TCGv t0; 5293 CHK_SV; 5294 5295 t0 = tcg_temp_new(); 5296 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5297 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 5298 tcg_temp_free(t0); 5299 #endif /* defined(CONFIG_USER_ONLY) */ 5300 } 5301 5302 #if defined(TARGET_PPC64) 5303 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 5304 5305 /* mfsr */ 5306 static void gen_mfsr_64b(DisasContext *ctx) 5307 { 5308 #if defined(CONFIG_USER_ONLY) 5309 GEN_PRIV; 5310 #else 5311 TCGv t0; 5312 5313 CHK_SV; 5314 t0 = tcg_const_tl(SR(ctx->opcode)); 5315 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5316 tcg_temp_free(t0); 5317 #endif /* defined(CONFIG_USER_ONLY) */ 5318 } 5319 5320 /* mfsrin */ 5321 static void gen_mfsrin_64b(DisasContext *ctx) 5322 { 5323 #if defined(CONFIG_USER_ONLY) 5324 GEN_PRIV; 5325 #else 5326 TCGv t0; 5327 5328 CHK_SV; 5329 t0 = tcg_temp_new(); 5330 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5331 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5332 tcg_temp_free(t0); 5333 #endif /* defined(CONFIG_USER_ONLY) */ 5334 } 5335 5336 /* mtsr */ 5337 static void gen_mtsr_64b(DisasContext *ctx) 5338 { 5339 #if defined(CONFIG_USER_ONLY) 5340 GEN_PRIV; 5341 #else 5342 TCGv t0; 5343 5344 CHK_SV; 5345 t0 = tcg_const_tl(SR(ctx->opcode)); 5346 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5347 tcg_temp_free(t0); 5348 #endif /* defined(CONFIG_USER_ONLY) */ 5349 } 5350 5351 /* mtsrin */ 5352 static void gen_mtsrin_64b(DisasContext *ctx) 5353 { 5354 #if defined(CONFIG_USER_ONLY) 5355 GEN_PRIV; 5356 #else 5357 TCGv t0; 5358 5359 CHK_SV; 5360 t0 = tcg_temp_new(); 5361 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5362 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5363 tcg_temp_free(t0); 5364 #endif /* defined(CONFIG_USER_ONLY) */ 5365 } 5366 5367 /* slbmte */ 5368 static void gen_slbmte(DisasContext *ctx) 5369 { 5370 #if defined(CONFIG_USER_ONLY) 5371 GEN_PRIV; 5372 #else 5373 CHK_SV; 5374 5375 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 5376 cpu_gpr[rS(ctx->opcode)]); 5377 #endif /* defined(CONFIG_USER_ONLY) */ 5378 } 5379 5380 static void gen_slbmfee(DisasContext *ctx) 5381 { 5382 #if defined(CONFIG_USER_ONLY) 5383 GEN_PRIV; 5384 #else 5385 CHK_SV; 5386 5387 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5388 cpu_gpr[rB(ctx->opcode)]); 5389 #endif /* defined(CONFIG_USER_ONLY) */ 
5390 } 5391 5392 static void gen_slbmfev(DisasContext *ctx) 5393 { 5394 #if defined(CONFIG_USER_ONLY) 5395 GEN_PRIV; 5396 #else 5397 CHK_SV; 5398 5399 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5400 cpu_gpr[rB(ctx->opcode)]); 5401 #endif /* defined(CONFIG_USER_ONLY) */ 5402 } 5403 5404 static void gen_slbfee_(DisasContext *ctx) 5405 { 5406 #if defined(CONFIG_USER_ONLY) 5407 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5408 #else 5409 TCGLabel *l1, *l2; 5410 5411 if (unlikely(ctx->pr)) { 5412 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5413 return; 5414 } 5415 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5416 cpu_gpr[rB(ctx->opcode)]); 5417 l1 = gen_new_label(); 5418 l2 = gen_new_label(); 5419 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5420 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 5421 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 5422 tcg_gen_br(l2); 5423 gen_set_label(l1); 5424 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 5425 gen_set_label(l2); 5426 #endif 5427 } 5428 #endif /* defined(TARGET_PPC64) */ 5429 5430 /*** Lookaside buffer management ***/ 5431 /* Optional & supervisor only: */ 5432 5433 /* tlbia */ 5434 static void gen_tlbia(DisasContext *ctx) 5435 { 5436 #if defined(CONFIG_USER_ONLY) 5437 GEN_PRIV; 5438 #else 5439 CHK_HV; 5440 5441 gen_helper_tlbia(cpu_env); 5442 #endif /* defined(CONFIG_USER_ONLY) */ 5443 } 5444 5445 /* tlbiel */ 5446 static void gen_tlbiel(DisasContext *ctx) 5447 { 5448 #if defined(CONFIG_USER_ONLY) 5449 GEN_PRIV; 5450 #else 5451 bool psr = (ctx->opcode >> 17) & 0x1; 5452 5453 if (ctx->pr || (!ctx->hv && !psr && ctx->hr)) { 5454 /* 5455 * tlbiel is privileged except when PSR=0 and HR=1, making it 5456 * hypervisor privileged. 5457 */ 5458 GEN_PRIV; 5459 } 5460 5461 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5462 #endif /* defined(CONFIG_USER_ONLY) */ 5463 } 5464 5465 /* tlbie */ 5466 static void gen_tlbie(DisasContext *ctx) 5467 { 5468 #if defined(CONFIG_USER_ONLY) 5469 GEN_PRIV; 5470 #else 5471 bool psr = (ctx->opcode >> 17) & 0x1; 5472 TCGv_i32 t1; 5473 5474 if (ctx->pr) { 5475 /* tlbie is privileged... */ 5476 GEN_PRIV; 5477 } else if (!ctx->hv) { 5478 if (!ctx->gtse || (!psr && ctx->hr)) { 5479 /* 5480 * ... except when GTSE=0 or when PSR=0 and HR=1, making it 5481 * hypervisor privileged. 
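 *
 * (A guest running with GTSE=0 is expected to use hypervisor facilities
 * for TLB invalidation rather than issuing tlbie directly.)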
5482 */ 5483 GEN_PRIV; 5484 } 5485 } 5486 5487 if (NARROW_MODE(ctx)) { 5488 TCGv t0 = tcg_temp_new(); 5489 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 5490 gen_helper_tlbie(cpu_env, t0); 5491 tcg_temp_free(t0); 5492 } else { 5493 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5494 } 5495 t1 = tcg_temp_new_i32(); 5496 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5497 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 5498 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5499 tcg_temp_free_i32(t1); 5500 #endif /* defined(CONFIG_USER_ONLY) */ 5501 } 5502 5503 /* tlbsync */ 5504 static void gen_tlbsync(DisasContext *ctx) 5505 { 5506 #if defined(CONFIG_USER_ONLY) 5507 GEN_PRIV; 5508 #else 5509 5510 if (ctx->gtse) { 5511 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 5512 } else { 5513 CHK_HV; /* Else hypervisor privileged */ 5514 } 5515 5516 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 5517 if (ctx->insns_flags & PPC_BOOKE) { 5518 gen_check_tlb_flush(ctx, true); 5519 } 5520 #endif /* defined(CONFIG_USER_ONLY) */ 5521 } 5522 5523 #if defined(TARGET_PPC64) 5524 /* slbia */ 5525 static void gen_slbia(DisasContext *ctx) 5526 { 5527 #if defined(CONFIG_USER_ONLY) 5528 GEN_PRIV; 5529 #else 5530 uint32_t ih = (ctx->opcode >> 21) & 0x7; 5531 TCGv_i32 t0 = tcg_const_i32(ih); 5532 5533 CHK_SV; 5534 5535 gen_helper_slbia(cpu_env, t0); 5536 tcg_temp_free_i32(t0); 5537 #endif /* defined(CONFIG_USER_ONLY) */ 5538 } 5539 5540 /* slbie */ 5541 static void gen_slbie(DisasContext *ctx) 5542 { 5543 #if defined(CONFIG_USER_ONLY) 5544 GEN_PRIV; 5545 #else 5546 CHK_SV; 5547 5548 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5549 #endif /* defined(CONFIG_USER_ONLY) */ 5550 } 5551 5552 /* slbieg */ 5553 static void gen_slbieg(DisasContext *ctx) 5554 { 5555 #if defined(CONFIG_USER_ONLY) 5556 GEN_PRIV; 5557 #else 5558 CHK_SV; 5559 5560 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5561 #endif /* defined(CONFIG_USER_ONLY) */ 5562 } 5563 5564 /* slbsync */ 5565 static void gen_slbsync(DisasContext *ctx) 5566 { 5567 #if defined(CONFIG_USER_ONLY) 5568 GEN_PRIV; 5569 #else 5570 CHK_SV; 5571 gen_check_tlb_flush(ctx, true); 5572 #endif /* defined(CONFIG_USER_ONLY) */ 5573 } 5574 5575 #endif /* defined(TARGET_PPC64) */ 5576 5577 /*** External control ***/ 5578 /* Optional: */ 5579 5580 /* eciwx */ 5581 static void gen_eciwx(DisasContext *ctx) 5582 { 5583 TCGv t0; 5584 /* Should check EAR[E] ! */ 5585 gen_set_access_type(ctx, ACCESS_EXT); 5586 t0 = tcg_temp_new(); 5587 gen_addr_reg_index(ctx, t0); 5588 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5589 DEF_MEMOP(MO_UL | MO_ALIGN)); 5590 tcg_temp_free(t0); 5591 } 5592 5593 /* ecowx */ 5594 static void gen_ecowx(DisasContext *ctx) 5595 { 5596 TCGv t0; 5597 /* Should check EAR[E] ! */ 5598 gen_set_access_type(ctx, ACCESS_EXT); 5599 t0 = tcg_temp_new(); 5600 gen_addr_reg_index(ctx, t0); 5601 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5602 DEF_MEMOP(MO_UL | MO_ALIGN)); 5603 tcg_temp_free(t0); 5604 } 5605 5606 /* PowerPC 601 specific instructions */ 5607 5608 /* abs - abs. */ 5609 static void gen_abs(DisasContext *ctx) 5610 { 5611 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5612 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5613 5614 tcg_gen_abs_tl(d, a); 5615 if (unlikely(Rc(ctx->opcode) != 0)) { 5616 gen_set_Rc0(ctx, d); 5617 } 5618 } 5619 5620 /* abso - abso. 
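 * Absolute value that also reports overflow: XER[OV] (and SO) are set when
 * rA holds the most negative value, 0x80000000.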
*/ 5621 static void gen_abso(DisasContext *ctx) 5622 { 5623 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5624 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5625 5626 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000); 5627 tcg_gen_abs_tl(d, a); 5628 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 5629 if (unlikely(Rc(ctx->opcode) != 0)) { 5630 gen_set_Rc0(ctx, d); 5631 } 5632 } 5633 5634 /* clcs */ 5635 static void gen_clcs(DisasContext *ctx) 5636 { 5637 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5638 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5639 tcg_temp_free_i32(t0); 5640 /* Rc=1 sets CR0 to an undefined state */ 5641 } 5642 5643 /* div - div. */ 5644 static void gen_div(DisasContext *ctx) 5645 { 5646 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5647 cpu_gpr[rB(ctx->opcode)]); 5648 if (unlikely(Rc(ctx->opcode) != 0)) { 5649 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5650 } 5651 } 5652 5653 /* divo - divo. */ 5654 static void gen_divo(DisasContext *ctx) 5655 { 5656 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5657 cpu_gpr[rB(ctx->opcode)]); 5658 if (unlikely(Rc(ctx->opcode) != 0)) { 5659 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5660 } 5661 } 5662 5663 /* divs - divs. */ 5664 static void gen_divs(DisasContext *ctx) 5665 { 5666 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5667 cpu_gpr[rB(ctx->opcode)]); 5668 if (unlikely(Rc(ctx->opcode) != 0)) { 5669 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5670 } 5671 } 5672 5673 /* divso - divso. */ 5674 static void gen_divso(DisasContext *ctx) 5675 { 5676 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5677 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5678 if (unlikely(Rc(ctx->opcode) != 0)) { 5679 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5680 } 5681 } 5682 5683 /* doz - doz. */ 5684 static void gen_doz(DisasContext *ctx) 5685 { 5686 TCGLabel *l1 = gen_new_label(); 5687 TCGLabel *l2 = gen_new_label(); 5688 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5689 cpu_gpr[rA(ctx->opcode)], l1); 5690 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5691 cpu_gpr[rA(ctx->opcode)]); 5692 tcg_gen_br(l2); 5693 gen_set_label(l1); 5694 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5695 gen_set_label(l2); 5696 if (unlikely(Rc(ctx->opcode) != 0)) { 5697 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5698 } 5699 } 5700 5701 /* dozo - dozo. 
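 * Like doz, but additionally sets XER[OV] and XER[SO] when the subtraction
 * overflows.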
*/ 5702 static void gen_dozo(DisasContext *ctx) 5703 { 5704 TCGLabel *l1 = gen_new_label(); 5705 TCGLabel *l2 = gen_new_label(); 5706 TCGv t0 = tcg_temp_new(); 5707 TCGv t1 = tcg_temp_new(); 5708 TCGv t2 = tcg_temp_new(); 5709 /* Start with XER OV disabled, the most likely case */ 5710 tcg_gen_movi_tl(cpu_ov, 0); 5711 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5712 cpu_gpr[rA(ctx->opcode)], l1); 5713 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5714 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5715 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5716 tcg_gen_andc_tl(t1, t1, t2); 5717 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5718 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5719 tcg_gen_movi_tl(cpu_ov, 1); 5720 tcg_gen_movi_tl(cpu_so, 1); 5721 tcg_gen_br(l2); 5722 gen_set_label(l1); 5723 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5724 gen_set_label(l2); 5725 tcg_temp_free(t0); 5726 tcg_temp_free(t1); 5727 tcg_temp_free(t2); 5728 if (unlikely(Rc(ctx->opcode) != 0)) { 5729 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5730 } 5731 } 5732 5733 /* dozi */ 5734 static void gen_dozi(DisasContext *ctx) 5735 { 5736 target_long simm = SIMM(ctx->opcode); 5737 TCGLabel *l1 = gen_new_label(); 5738 TCGLabel *l2 = gen_new_label(); 5739 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5740 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5741 tcg_gen_br(l2); 5742 gen_set_label(l1); 5743 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5744 gen_set_label(l2); 5745 if (unlikely(Rc(ctx->opcode) != 0)) { 5746 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5747 } 5748 } 5749 5750 /* lscbx - lscbx. */ 5751 static void gen_lscbx(DisasContext *ctx) 5752 { 5753 TCGv t0 = tcg_temp_new(); 5754 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5755 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5756 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5757 5758 gen_addr_reg_index(ctx, t0); 5759 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5760 tcg_temp_free_i32(t1); 5761 tcg_temp_free_i32(t2); 5762 tcg_temp_free_i32(t3); 5763 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5764 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5765 if (unlikely(Rc(ctx->opcode) != 0)) { 5766 gen_set_Rc0(ctx, t0); 5767 } 5768 tcg_temp_free(t0); 5769 } 5770 5771 /* maskg - maskg. */ 5772 static void gen_maskg(DisasContext *ctx) 5773 { 5774 TCGLabel *l1 = gen_new_label(); 5775 TCGv t0 = tcg_temp_new(); 5776 TCGv t1 = tcg_temp_new(); 5777 TCGv t2 = tcg_temp_new(); 5778 TCGv t3 = tcg_temp_new(); 5779 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5780 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5781 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5782 tcg_gen_addi_tl(t2, t0, 1); 5783 tcg_gen_shr_tl(t2, t3, t2); 5784 tcg_gen_shr_tl(t3, t3, t1); 5785 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5786 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5787 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5788 gen_set_label(l1); 5789 tcg_temp_free(t0); 5790 tcg_temp_free(t1); 5791 tcg_temp_free(t2); 5792 tcg_temp_free(t3); 5793 if (unlikely(Rc(ctx->opcode) != 0)) { 5794 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5795 } 5796 } 5797 5798 /* maskir - maskir. 
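 * Mask-insert from register: rA = (rS & rB) | (rA & ~rB).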
*/ 5799 static void gen_maskir(DisasContext *ctx) 5800 { 5801 TCGv t0 = tcg_temp_new(); 5802 TCGv t1 = tcg_temp_new(); 5803 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5804 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5805 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5806 tcg_temp_free(t0); 5807 tcg_temp_free(t1); 5808 if (unlikely(Rc(ctx->opcode) != 0)) { 5809 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5810 } 5811 } 5812 5813 /* mul - mul. */ 5814 static void gen_mul(DisasContext *ctx) 5815 { 5816 TCGv_i64 t0 = tcg_temp_new_i64(); 5817 TCGv_i64 t1 = tcg_temp_new_i64(); 5818 TCGv t2 = tcg_temp_new(); 5819 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5820 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5821 tcg_gen_mul_i64(t0, t0, t1); 5822 tcg_gen_trunc_i64_tl(t2, t0); 5823 gen_store_spr(SPR_MQ, t2); 5824 tcg_gen_shri_i64(t1, t0, 32); 5825 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5826 tcg_temp_free_i64(t0); 5827 tcg_temp_free_i64(t1); 5828 tcg_temp_free(t2); 5829 if (unlikely(Rc(ctx->opcode) != 0)) { 5830 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5831 } 5832 } 5833 5834 /* mulo - mulo. */ 5835 static void gen_mulo(DisasContext *ctx) 5836 { 5837 TCGLabel *l1 = gen_new_label(); 5838 TCGv_i64 t0 = tcg_temp_new_i64(); 5839 TCGv_i64 t1 = tcg_temp_new_i64(); 5840 TCGv t2 = tcg_temp_new(); 5841 /* Start with XER OV disabled, the most likely case */ 5842 tcg_gen_movi_tl(cpu_ov, 0); 5843 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5844 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5845 tcg_gen_mul_i64(t0, t0, t1); 5846 tcg_gen_trunc_i64_tl(t2, t0); 5847 gen_store_spr(SPR_MQ, t2); 5848 tcg_gen_shri_i64(t1, t0, 32); 5849 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5850 tcg_gen_ext32s_i64(t1, t0); 5851 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5852 tcg_gen_movi_tl(cpu_ov, 1); 5853 tcg_gen_movi_tl(cpu_so, 1); 5854 gen_set_label(l1); 5855 tcg_temp_free_i64(t0); 5856 tcg_temp_free_i64(t1); 5857 tcg_temp_free(t2); 5858 if (unlikely(Rc(ctx->opcode) != 0)) { 5859 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5860 } 5861 } 5862 5863 /* nabs - nabs. */ 5864 static void gen_nabs(DisasContext *ctx) 5865 { 5866 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5867 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5868 5869 tcg_gen_abs_tl(d, a); 5870 tcg_gen_neg_tl(d, d); 5871 if (unlikely(Rc(ctx->opcode) != 0)) { 5872 gen_set_Rc0(ctx, d); 5873 } 5874 } 5875 5876 /* nabso - nabso. */ 5877 static void gen_nabso(DisasContext *ctx) 5878 { 5879 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5880 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5881 5882 tcg_gen_abs_tl(d, a); 5883 tcg_gen_neg_tl(d, d); 5884 /* nabs never overflows */ 5885 tcg_gen_movi_tl(cpu_ov, 0); 5886 if (unlikely(Rc(ctx->opcode) != 0)) { 5887 gen_set_Rc0(ctx, d); 5888 } 5889 } 5890 5891 /* rlmi - rlmi. */ 5892 static void gen_rlmi(DisasContext *ctx) 5893 { 5894 uint32_t mb = MB(ctx->opcode); 5895 uint32_t me = ME(ctx->opcode); 5896 TCGv t0 = tcg_temp_new(); 5897 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5898 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5899 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5900 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 5901 ~MASK(mb, me)); 5902 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5903 tcg_temp_free(t0); 5904 if (unlikely(Rc(ctx->opcode) != 0)) { 5905 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5906 } 5907 } 5908 5909 /* rrib - rrib. 
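 * Rotate-right-and-insert-bit: bit 0 (the MSB) of rS is inserted into bit
 * position rB[27:31] of rA; all other bits of rA are preserved.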
*/ 5910 static void gen_rrib(DisasContext *ctx) 5911 { 5912 TCGv t0 = tcg_temp_new(); 5913 TCGv t1 = tcg_temp_new(); 5914 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5915 tcg_gen_movi_tl(t1, 0x80000000); 5916 tcg_gen_shr_tl(t1, t1, t0); 5917 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5918 tcg_gen_and_tl(t0, t0, t1); 5919 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5920 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5921 tcg_temp_free(t0); 5922 tcg_temp_free(t1); 5923 if (unlikely(Rc(ctx->opcode) != 0)) { 5924 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5925 } 5926 } 5927 5928 /* sle - sle. */ 5929 static void gen_sle(DisasContext *ctx) 5930 { 5931 TCGv t0 = tcg_temp_new(); 5932 TCGv t1 = tcg_temp_new(); 5933 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5934 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5935 tcg_gen_subfi_tl(t1, 32, t1); 5936 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5937 tcg_gen_or_tl(t1, t0, t1); 5938 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5939 gen_store_spr(SPR_MQ, t1); 5940 tcg_temp_free(t0); 5941 tcg_temp_free(t1); 5942 if (unlikely(Rc(ctx->opcode) != 0)) { 5943 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5944 } 5945 } 5946 5947 /* sleq - sleq. */ 5948 static void gen_sleq(DisasContext *ctx) 5949 { 5950 TCGv t0 = tcg_temp_new(); 5951 TCGv t1 = tcg_temp_new(); 5952 TCGv t2 = tcg_temp_new(); 5953 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5954 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5955 tcg_gen_shl_tl(t2, t2, t0); 5956 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5957 gen_load_spr(t1, SPR_MQ); 5958 gen_store_spr(SPR_MQ, t0); 5959 tcg_gen_and_tl(t0, t0, t2); 5960 tcg_gen_andc_tl(t1, t1, t2); 5961 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5962 tcg_temp_free(t0); 5963 tcg_temp_free(t1); 5964 tcg_temp_free(t2); 5965 if (unlikely(Rc(ctx->opcode) != 0)) { 5966 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5967 } 5968 } 5969 5970 /* sliq - sliq. */ 5971 static void gen_sliq(DisasContext *ctx) 5972 { 5973 int sh = SH(ctx->opcode); 5974 TCGv t0 = tcg_temp_new(); 5975 TCGv t1 = tcg_temp_new(); 5976 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5977 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5978 tcg_gen_or_tl(t1, t0, t1); 5979 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5980 gen_store_spr(SPR_MQ, t1); 5981 tcg_temp_free(t0); 5982 tcg_temp_free(t1); 5983 if (unlikely(Rc(ctx->opcode) != 0)) { 5984 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5985 } 5986 } 5987 5988 /* slliq - slliq. */ 5989 static void gen_slliq(DisasContext *ctx) 5990 { 5991 int sh = SH(ctx->opcode); 5992 TCGv t0 = tcg_temp_new(); 5993 TCGv t1 = tcg_temp_new(); 5994 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5995 gen_load_spr(t1, SPR_MQ); 5996 gen_store_spr(SPR_MQ, t0); 5997 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 5998 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 5999 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6000 tcg_temp_free(t0); 6001 tcg_temp_free(t1); 6002 if (unlikely(Rc(ctx->opcode) != 0)) { 6003 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6004 } 6005 } 6006 6007 /* sllq - sllq. 
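 * Shift-left-long with MQ: rS is shifted left by rB[26:31] and the vacated
 * low-order bits are supplied by the MQ register.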
*/ 6008 static void gen_sllq(DisasContext *ctx) 6009 { 6010 TCGLabel *l1 = gen_new_label(); 6011 TCGLabel *l2 = gen_new_label(); 6012 TCGv t0 = tcg_temp_local_new(); 6013 TCGv t1 = tcg_temp_local_new(); 6014 TCGv t2 = tcg_temp_local_new(); 6015 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6016 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6017 tcg_gen_shl_tl(t1, t1, t2); 6018 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6019 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6020 gen_load_spr(t0, SPR_MQ); 6021 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6022 tcg_gen_br(l2); 6023 gen_set_label(l1); 6024 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6025 gen_load_spr(t2, SPR_MQ); 6026 tcg_gen_andc_tl(t1, t2, t1); 6027 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6028 gen_set_label(l2); 6029 tcg_temp_free(t0); 6030 tcg_temp_free(t1); 6031 tcg_temp_free(t2); 6032 if (unlikely(Rc(ctx->opcode) != 0)) { 6033 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6034 } 6035 } 6036 6037 /* slq - slq. */ 6038 static void gen_slq(DisasContext *ctx) 6039 { 6040 TCGLabel *l1 = gen_new_label(); 6041 TCGv t0 = tcg_temp_new(); 6042 TCGv t1 = tcg_temp_new(); 6043 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6044 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6045 tcg_gen_subfi_tl(t1, 32, t1); 6046 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6047 tcg_gen_or_tl(t1, t0, t1); 6048 gen_store_spr(SPR_MQ, t1); 6049 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6050 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6051 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6052 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6053 gen_set_label(l1); 6054 tcg_temp_free(t0); 6055 tcg_temp_free(t1); 6056 if (unlikely(Rc(ctx->opcode) != 0)) { 6057 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6058 } 6059 } 6060 6061 /* sraiq - sraiq. */ 6062 static void gen_sraiq(DisasContext *ctx) 6063 { 6064 int sh = SH(ctx->opcode); 6065 TCGLabel *l1 = gen_new_label(); 6066 TCGv t0 = tcg_temp_new(); 6067 TCGv t1 = tcg_temp_new(); 6068 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6069 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6070 tcg_gen_or_tl(t0, t0, t1); 6071 gen_store_spr(SPR_MQ, t0); 6072 tcg_gen_movi_tl(cpu_ca, 0); 6073 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6074 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 6075 tcg_gen_movi_tl(cpu_ca, 1); 6076 gen_set_label(l1); 6077 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 6078 tcg_temp_free(t0); 6079 tcg_temp_free(t1); 6080 if (unlikely(Rc(ctx->opcode) != 0)) { 6081 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6082 } 6083 } 6084 6085 /* sraq - sraq. 
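 * Shift-right-algebraic with MQ: the rotated source is placed in MQ and
 * XER[CA] is set when a negative source has 1 bits shifted out.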
*/ 6086 static void gen_sraq(DisasContext *ctx) 6087 { 6088 TCGLabel *l1 = gen_new_label(); 6089 TCGLabel *l2 = gen_new_label(); 6090 TCGv t0 = tcg_temp_new(); 6091 TCGv t1 = tcg_temp_local_new(); 6092 TCGv t2 = tcg_temp_local_new(); 6093 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6094 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6095 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 6096 tcg_gen_subfi_tl(t2, 32, t2); 6097 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 6098 tcg_gen_or_tl(t0, t0, t2); 6099 gen_store_spr(SPR_MQ, t0); 6100 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6101 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 6102 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 6103 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 6104 gen_set_label(l1); 6105 tcg_temp_free(t0); 6106 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 6107 tcg_gen_movi_tl(cpu_ca, 0); 6108 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 6109 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 6110 tcg_gen_movi_tl(cpu_ca, 1); 6111 gen_set_label(l2); 6112 tcg_temp_free(t1); 6113 tcg_temp_free(t2); 6114 if (unlikely(Rc(ctx->opcode) != 0)) { 6115 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6116 } 6117 } 6118 6119 /* sre - sre. */ 6120 static void gen_sre(DisasContext *ctx) 6121 { 6122 TCGv t0 = tcg_temp_new(); 6123 TCGv t1 = tcg_temp_new(); 6124 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6125 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6126 tcg_gen_subfi_tl(t1, 32, t1); 6127 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6128 tcg_gen_or_tl(t1, t0, t1); 6129 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6130 gen_store_spr(SPR_MQ, t1); 6131 tcg_temp_free(t0); 6132 tcg_temp_free(t1); 6133 if (unlikely(Rc(ctx->opcode) != 0)) { 6134 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6135 } 6136 } 6137 6138 /* srea - srea. 
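Descriptive note for the function that follows: the source rotated right by the shift amount is saved in MQ, while rA receives the algebraic (sign-extending) right shift of rS.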
*/ 6139 static void gen_srea(DisasContext *ctx) 6140 { 6141 TCGv t0 = tcg_temp_new(); 6142 TCGv t1 = tcg_temp_new(); 6143 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6144 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6145 gen_store_spr(SPR_MQ, t0); 6146 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 6147 tcg_temp_free(t0); 6148 tcg_temp_free(t1); 6149 if (unlikely(Rc(ctx->opcode) != 0)) { 6150 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6151 } 6152 } 6153 6154 /* sreq */ 6155 static void gen_sreq(DisasContext *ctx) 6156 { 6157 TCGv t0 = tcg_temp_new(); 6158 TCGv t1 = tcg_temp_new(); 6159 TCGv t2 = tcg_temp_new(); 6160 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6161 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6162 tcg_gen_shr_tl(t1, t1, t0); 6163 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6164 gen_load_spr(t2, SPR_MQ); 6165 gen_store_spr(SPR_MQ, t0); 6166 tcg_gen_and_tl(t0, t0, t1); 6167 tcg_gen_andc_tl(t2, t2, t1); 6168 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6169 tcg_temp_free(t0); 6170 tcg_temp_free(t1); 6171 tcg_temp_free(t2); 6172 if (unlikely(Rc(ctx->opcode) != 0)) { 6173 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6174 } 6175 } 6176 6177 /* sriq */ 6178 static void gen_sriq(DisasContext *ctx) 6179 { 6180 int sh = SH(ctx->opcode); 6181 TCGv t0 = tcg_temp_new(); 6182 TCGv t1 = tcg_temp_new(); 6183 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6184 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6185 tcg_gen_or_tl(t1, t0, t1); 6186 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6187 gen_store_spr(SPR_MQ, t1); 6188 tcg_temp_free(t0); 6189 tcg_temp_free(t1); 6190 if (unlikely(Rc(ctx->opcode) != 0)) { 6191 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6192 } 6193 } 6194 6195 /* srliq */ 6196 static void gen_srliq(DisasContext *ctx) 6197 { 6198 int sh = SH(ctx->opcode); 6199 TCGv t0 = tcg_temp_new(); 6200 TCGv t1 = tcg_temp_new(); 6201 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6202 gen_load_spr(t1, SPR_MQ); 6203 gen_store_spr(SPR_MQ, t0); 6204 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 6205 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 6206 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6207 tcg_temp_free(t0); 6208 tcg_temp_free(t1); 6209 if (unlikely(Rc(ctx->opcode) != 0)) { 6210 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6211 } 6212 } 6213 6214 /* srlq */ 6215 static void gen_srlq(DisasContext *ctx) 6216 { 6217 TCGLabel *l1 = gen_new_label(); 6218 TCGLabel *l2 = gen_new_label(); 6219 TCGv t0 = tcg_temp_local_new(); 6220 TCGv t1 = tcg_temp_local_new(); 6221 TCGv t2 = tcg_temp_local_new(); 6222 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6223 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6224 tcg_gen_shr_tl(t2, t1, t2); 6225 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6226 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6227 gen_load_spr(t0, SPR_MQ); 6228 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6229 tcg_gen_br(l2); 6230 gen_set_label(l1); 6231 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6232 tcg_gen_and_tl(t0, t0, t2); 6233 gen_load_spr(t1, SPR_MQ); 6234 tcg_gen_andc_tl(t1, t1, t2); 6235 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6236 gen_set_label(l2); 6237 tcg_temp_free(t0); 6238 tcg_temp_free(t1); 6239 tcg_temp_free(t2); 6240 if (unlikely(Rc(ctx->opcode) != 0)) { 6241 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6242 } 6243 } 6244 6245 /* srq */ 6246 static void gen_srq(DisasContext *ctx) 6247 { 6248 TCGLabel *l1 = gen_new_label(); 6249 TCGv t0 = tcg_temp_new(); 6250 TCGv t1 = 
tcg_temp_new(); 6251 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6252 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6253 tcg_gen_subfi_tl(t1, 32, t1); 6254 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6255 tcg_gen_or_tl(t1, t0, t1); 6256 gen_store_spr(SPR_MQ, t1); 6257 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6258 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6259 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6260 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6261 gen_set_label(l1); 6262 tcg_temp_free(t0); 6263 tcg_temp_free(t1); 6264 if (unlikely(Rc(ctx->opcode) != 0)) { 6265 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6266 } 6267 } 6268 6269 /* PowerPC 602 specific instructions */ 6270 6271 /* dsa */ 6272 static void gen_dsa(DisasContext *ctx) 6273 { 6274 /* XXX: TODO */ 6275 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6276 } 6277 6278 /* esa */ 6279 static void gen_esa(DisasContext *ctx) 6280 { 6281 /* XXX: TODO */ 6282 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6283 } 6284 6285 /* mfrom */ 6286 static void gen_mfrom(DisasContext *ctx) 6287 { 6288 #if defined(CONFIG_USER_ONLY) 6289 GEN_PRIV; 6290 #else 6291 CHK_SV; 6292 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6293 #endif /* defined(CONFIG_USER_ONLY) */ 6294 } 6295 6296 /* 602 - 603 - G2 TLB management */ 6297 6298 /* tlbld */ 6299 static void gen_tlbld_6xx(DisasContext *ctx) 6300 { 6301 #if defined(CONFIG_USER_ONLY) 6302 GEN_PRIV; 6303 #else 6304 CHK_SV; 6305 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6306 #endif /* defined(CONFIG_USER_ONLY) */ 6307 } 6308 6309 /* tlbli */ 6310 static void gen_tlbli_6xx(DisasContext *ctx) 6311 { 6312 #if defined(CONFIG_USER_ONLY) 6313 GEN_PRIV; 6314 #else 6315 CHK_SV; 6316 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6317 #endif /* defined(CONFIG_USER_ONLY) */ 6318 } 6319 6320 /* POWER instructions not in PowerPC 601 */ 6321 6322 /* clf */ 6323 static void gen_clf(DisasContext *ctx) 6324 { 6325 /* Cache line flush: implemented as no-op */ 6326 } 6327 6328 /* cli */ 6329 static void gen_cli(DisasContext *ctx) 6330 { 6331 #if defined(CONFIG_USER_ONLY) 6332 GEN_PRIV; 6333 #else 6334 /* Cache line invalidate: privileged and treated as no-op */ 6335 CHK_SV; 6336 #endif /* defined(CONFIG_USER_ONLY) */ 6337 } 6338 6339 /* dclst */ 6340 static void gen_dclst(DisasContext *ctx) 6341 { 6342 /* Data cache line store: treated as no-op */ 6343 } 6344 6345 static void gen_mfsri(DisasContext *ctx) 6346 { 6347 #if defined(CONFIG_USER_ONLY) 6348 GEN_PRIV; 6349 #else 6350 int ra = rA(ctx->opcode); 6351 int rd = rD(ctx->opcode); 6352 TCGv t0; 6353 6354 CHK_SV; 6355 t0 = tcg_temp_new(); 6356 gen_addr_reg_index(ctx, t0); 6357 tcg_gen_extract_tl(t0, t0, 28, 4); 6358 gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0); 6359 tcg_temp_free(t0); 6360 if (ra != 0 && ra != rd) { 6361 tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]); 6362 } 6363 #endif /* defined(CONFIG_USER_ONLY) */ 6364 } 6365 6366 static void gen_rac(DisasContext *ctx) 6367 { 6368 #if defined(CONFIG_USER_ONLY) 6369 GEN_PRIV; 6370 #else 6371 TCGv t0; 6372 6373 CHK_SV; 6374 t0 = tcg_temp_new(); 6375 gen_addr_reg_index(ctx, t0); 6376 gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6377 tcg_temp_free(t0); 6378 #endif /* defined(CONFIG_USER_ONLY) */ 6379 } 6380 6381 static void gen_rfsvc(DisasContext *ctx) 6382 { 6383 #if defined(CONFIG_USER_ONLY) 6384 GEN_PRIV; 6385 #else 6386 CHK_SV; 6387 6388 gen_helper_rfsvc(cpu_env); 6389 ctx->base.is_jmp = DISAS_EXIT; 6390 #endif /* 
defined(CONFIG_USER_ONLY) */ 6391 } 6392 6393 /* svc is not implemented for now */ 6394 6395 /* BookE specific instructions */ 6396 6397 /* XXX: not implemented on 440 ? */ 6398 static void gen_mfapidi(DisasContext *ctx) 6399 { 6400 /* XXX: TODO */ 6401 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6402 } 6403 6404 /* XXX: not implemented on 440 ? */ 6405 static void gen_tlbiva(DisasContext *ctx) 6406 { 6407 #if defined(CONFIG_USER_ONLY) 6408 GEN_PRIV; 6409 #else 6410 TCGv t0; 6411 6412 CHK_SV; 6413 t0 = tcg_temp_new(); 6414 gen_addr_reg_index(ctx, t0); 6415 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6416 tcg_temp_free(t0); 6417 #endif /* defined(CONFIG_USER_ONLY) */ 6418 } 6419 6420 /* All 405 MAC instructions are translated here */ 6421 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 6422 int ra, int rb, int rt, int Rc) 6423 { 6424 TCGv t0, t1; 6425 6426 t0 = tcg_temp_local_new(); 6427 t1 = tcg_temp_local_new(); 6428 6429 switch (opc3 & 0x0D) { 6430 case 0x05: 6431 /* macchw - macchw. - macchwo - macchwo. */ 6432 /* macchws - macchws. - macchwso - macchwso. */ 6433 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 6434 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 6435 /* mulchw - mulchw. */ 6436 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6437 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6438 tcg_gen_ext16s_tl(t1, t1); 6439 break; 6440 case 0x04: 6441 /* macchwu - macchwu. - macchwuo - macchwuo. */ 6442 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 6443 /* mulchwu - mulchwu. */ 6444 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6445 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6446 tcg_gen_ext16u_tl(t1, t1); 6447 break; 6448 case 0x01: 6449 /* machhw - machhw. - machhwo - machhwo. */ 6450 /* machhws - machhws. - machhwso - machhwso. */ 6451 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 6452 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 6453 /* mulhhw - mulhhw. */ 6454 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 6455 tcg_gen_ext16s_tl(t0, t0); 6456 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6457 tcg_gen_ext16s_tl(t1, t1); 6458 break; 6459 case 0x00: 6460 /* machhwu - machhwu. - machhwuo - machhwuo. */ 6461 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 6462 /* mulhhwu - mulhhwu. */ 6463 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 6464 tcg_gen_ext16u_tl(t0, t0); 6465 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6466 tcg_gen_ext16u_tl(t1, t1); 6467 break; 6468 case 0x0D: 6469 /* maclhw - maclhw. - maclhwo - maclhwo. */ 6470 /* maclhws - maclhws. - maclhwso - maclhwso. */ 6471 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 6472 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 6473 /* mullhw - mullhw. */ 6474 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6475 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 6476 break; 6477 case 0x0C: 6478 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 6479 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 6480 /* mullhwu - mullhwu. 
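Both operands for this case are the low 16 bits of rA and rB, zero-extended.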
*/ 6481 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6482 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 6483 break; 6484 } 6485 if (opc2 & 0x04) { 6486 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 6487 tcg_gen_mul_tl(t1, t0, t1); 6488 if (opc2 & 0x02) { 6489 /* nmultiply-and-accumulate (0x0E) */ 6490 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 6491 } else { 6492 /* multiply-and-accumulate (0x0C) */ 6493 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 6494 } 6495 6496 if (opc3 & 0x12) { 6497 /* Check overflow and/or saturate */ 6498 TCGLabel *l1 = gen_new_label(); 6499 6500 if (opc3 & 0x10) { 6501 /* Start with XER OV disabled, the most likely case */ 6502 tcg_gen_movi_tl(cpu_ov, 0); 6503 } 6504 if (opc3 & 0x01) { 6505 /* Signed */ 6506 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 6507 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 6508 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 6509 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 6510 if (opc3 & 0x02) { 6511 /* Saturate */ 6512 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 6513 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 6514 } 6515 } else { 6516 /* Unsigned */ 6517 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 6518 if (opc3 & 0x02) { 6519 /* Saturate */ 6520 tcg_gen_movi_tl(t0, UINT32_MAX); 6521 } 6522 } 6523 if (opc3 & 0x10) { 6524 /* Check overflow */ 6525 tcg_gen_movi_tl(cpu_ov, 1); 6526 tcg_gen_movi_tl(cpu_so, 1); 6527 } 6528 gen_set_label(l1); 6529 tcg_gen_mov_tl(cpu_gpr[rt], t0); 6530 } 6531 } else { 6532 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 6533 } 6534 tcg_temp_free(t0); 6535 tcg_temp_free(t1); 6536 if (unlikely(Rc) != 0) { 6537 /* Update Rc0 */ 6538 gen_set_Rc0(ctx, cpu_gpr[rt]); 6539 } 6540 } 6541 6542 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6543 static void glue(gen_, name)(DisasContext *ctx) \ 6544 { \ 6545 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 6546 rD(ctx->opcode), Rc(ctx->opcode)); \ 6547 } 6548 6549 /* macchw - macchw. */ 6550 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 6551 /* macchwo - macchwo. */ 6552 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 6553 /* macchws - macchws. */ 6554 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 6555 /* macchwso - macchwso. */ 6556 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 6557 /* macchwsu - macchwsu. */ 6558 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 6559 /* macchwsuo - macchwsuo. */ 6560 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 6561 /* macchwu - macchwu. */ 6562 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 6563 /* macchwuo - macchwuo. */ 6564 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 6565 /* machhw - machhw. */ 6566 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 6567 /* machhwo - machhwo. */ 6568 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 6569 /* machhws - machhws. */ 6570 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 6571 /* machhwso - machhwso. */ 6572 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 6573 /* machhwsu - machhwsu. */ 6574 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 6575 /* machhwsuo - machhwsuo. */ 6576 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 6577 /* machhwu - machhwu. */ 6578 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 6579 /* machhwuo - machhwuo. */ 6580 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 6581 /* maclhw - maclhw. */ 6582 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 6583 /* maclhwo - maclhwo. */ 6584 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 6585 /* maclhws - maclhws. */ 6586 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 6587 /* maclhwso - maclhwso. */ 6588 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 6589 /* maclhwu - maclhwu. */ 6590 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 6591 /* maclhwuo - maclhwuo. */ 6592 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 6593 /* maclhwsu - maclhwsu. 
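In gen_405_mulladd_insn, opc3 0x0E decodes as the unsigned low-halfword multiply-accumulate with saturation and without XER[OV] update.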
*/ 6594 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 6595 /* maclhwsuo - maclhwsuo. */ 6596 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 6597 /* nmacchw - nmacchw. */ 6598 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 6599 /* nmacchwo - nmacchwo. */ 6600 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 6601 /* nmacchws - nmacchws. */ 6602 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 6603 /* nmacchwso - nmacchwso. */ 6604 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 6605 /* nmachhw - nmachhw. */ 6606 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 6607 /* nmachhwo - nmachhwo. */ 6608 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 6609 /* nmachhws - nmachhws. */ 6610 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 6611 /* nmachhwso - nmachhwso. */ 6612 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 6613 /* nmaclhw - nmaclhw. */ 6614 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 6615 /* nmaclhwo - nmaclhwo. */ 6616 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 6617 /* nmaclhws - nmaclhws. */ 6618 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 6619 /* nmaclhwso - nmaclhwso. */ 6620 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 6621 6622 /* mulchw - mulchw. */ 6623 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 6624 /* mulchwu - mulchwu. */ 6625 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 6626 /* mulhhw - mulhhw. */ 6627 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 6628 /* mulhhwu - mulhhwu. */ 6629 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 6630 /* mullhw - mullhw. */ 6631 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 6632 /* mullhwu - mullhwu. */ 6633 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 6634 6635 /* mfdcr */ 6636 static void gen_mfdcr(DisasContext *ctx) 6637 { 6638 #if defined(CONFIG_USER_ONLY) 6639 GEN_PRIV; 6640 #else 6641 TCGv dcrn; 6642 6643 CHK_SV; 6644 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6645 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6646 tcg_temp_free(dcrn); 6647 #endif /* defined(CONFIG_USER_ONLY) */ 6648 } 6649 6650 /* mtdcr */ 6651 static void gen_mtdcr(DisasContext *ctx) 6652 { 6653 #if defined(CONFIG_USER_ONLY) 6654 GEN_PRIV; 6655 #else 6656 TCGv dcrn; 6657 6658 CHK_SV; 6659 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6660 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6661 tcg_temp_free(dcrn); 6662 #endif /* defined(CONFIG_USER_ONLY) */ 6663 } 6664 6665 /* mfdcrx */ 6666 /* XXX: not implemented on 440 ? */ 6667 static void gen_mfdcrx(DisasContext *ctx) 6668 { 6669 #if defined(CONFIG_USER_ONLY) 6670 GEN_PRIV; 6671 #else 6672 CHK_SV; 6673 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6674 cpu_gpr[rA(ctx->opcode)]); 6675 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6676 #endif /* defined(CONFIG_USER_ONLY) */ 6677 } 6678 6679 /* mtdcrx */ 6680 /* XXX: not implemented on 440 ? 
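mtdcrx takes the DCR number from register rA at run time instead of from the immediate SPR field; with Rc=1 the state of CR0 is undefined.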
*/ 6681 static void gen_mtdcrx(DisasContext *ctx) 6682 { 6683 #if defined(CONFIG_USER_ONLY) 6684 GEN_PRIV; 6685 #else 6686 CHK_SV; 6687 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6688 cpu_gpr[rS(ctx->opcode)]); 6689 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6690 #endif /* defined(CONFIG_USER_ONLY) */ 6691 } 6692 6693 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6694 static void gen_mfdcrux(DisasContext *ctx) 6695 { 6696 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6697 cpu_gpr[rA(ctx->opcode)]); 6698 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6699 } 6700 6701 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6702 static void gen_mtdcrux(DisasContext *ctx) 6703 { 6704 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6705 cpu_gpr[rS(ctx->opcode)]); 6706 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6707 } 6708 6709 /* dccci */ 6710 static void gen_dccci(DisasContext *ctx) 6711 { 6712 CHK_SV; 6713 /* interpreted as no-op */ 6714 } 6715 6716 /* dcread */ 6717 static void gen_dcread(DisasContext *ctx) 6718 { 6719 #if defined(CONFIG_USER_ONLY) 6720 GEN_PRIV; 6721 #else 6722 TCGv EA, val; 6723 6724 CHK_SV; 6725 gen_set_access_type(ctx, ACCESS_CACHE); 6726 EA = tcg_temp_new(); 6727 gen_addr_reg_index(ctx, EA); 6728 val = tcg_temp_new(); 6729 gen_qemu_ld32u(ctx, val, EA); 6730 tcg_temp_free(val); 6731 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6732 tcg_temp_free(EA); 6733 #endif /* defined(CONFIG_USER_ONLY) */ 6734 } 6735 6736 /* icbt */ 6737 static void gen_icbt_40x(DisasContext *ctx) 6738 { 6739 /* 6740 * interpreted as no-op 6741 * XXX: specification say this is treated as a load by the MMU but 6742 * does not generate any exception 6743 */ 6744 } 6745 6746 /* iccci */ 6747 static void gen_iccci(DisasContext *ctx) 6748 { 6749 CHK_SV; 6750 /* interpreted as no-op */ 6751 } 6752 6753 /* icread */ 6754 static void gen_icread(DisasContext *ctx) 6755 { 6756 CHK_SV; 6757 /* interpreted as no-op */ 6758 } 6759 6760 /* rfci (supervisor only) */ 6761 static void gen_rfci_40x(DisasContext *ctx) 6762 { 6763 #if defined(CONFIG_USER_ONLY) 6764 GEN_PRIV; 6765 #else 6766 CHK_SV; 6767 /* Restore CPU state */ 6768 gen_helper_40x_rfci(cpu_env); 6769 ctx->base.is_jmp = DISAS_EXIT; 6770 #endif /* defined(CONFIG_USER_ONLY) */ 6771 } 6772 6773 static void gen_rfci(DisasContext *ctx) 6774 { 6775 #if defined(CONFIG_USER_ONLY) 6776 GEN_PRIV; 6777 #else 6778 CHK_SV; 6779 /* Restore CPU state */ 6780 gen_helper_rfci(cpu_env); 6781 ctx->base.is_jmp = DISAS_EXIT; 6782 #endif /* defined(CONFIG_USER_ONLY) */ 6783 } 6784 6785 /* BookE specific */ 6786 6787 /* XXX: not implemented on 440 ? */ 6788 static void gen_rfdi(DisasContext *ctx) 6789 { 6790 #if defined(CONFIG_USER_ONLY) 6791 GEN_PRIV; 6792 #else 6793 CHK_SV; 6794 /* Restore CPU state */ 6795 gen_helper_rfdi(cpu_env); 6796 ctx->base.is_jmp = DISAS_EXIT; 6797 #endif /* defined(CONFIG_USER_ONLY) */ 6798 } 6799 6800 /* XXX: not implemented on 440 ? 
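rfmci returns from a machine-check interrupt; like the other rf* handlers it calls its helper to restore CPU state and forces DISAS_EXIT so translation stops.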
*/ 6801 static void gen_rfmci(DisasContext *ctx) 6802 { 6803 #if defined(CONFIG_USER_ONLY) 6804 GEN_PRIV; 6805 #else 6806 CHK_SV; 6807 /* Restore CPU state */ 6808 gen_helper_rfmci(cpu_env); 6809 ctx->base.is_jmp = DISAS_EXIT; 6810 #endif /* defined(CONFIG_USER_ONLY) */ 6811 } 6812 6813 /* TLB management - PowerPC 405 implementation */ 6814 6815 /* tlbre */ 6816 static void gen_tlbre_40x(DisasContext *ctx) 6817 { 6818 #if defined(CONFIG_USER_ONLY) 6819 GEN_PRIV; 6820 #else 6821 CHK_SV; 6822 switch (rB(ctx->opcode)) { 6823 case 0: 6824 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6825 cpu_gpr[rA(ctx->opcode)]); 6826 break; 6827 case 1: 6828 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6829 cpu_gpr[rA(ctx->opcode)]); 6830 break; 6831 default: 6832 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6833 break; 6834 } 6835 #endif /* defined(CONFIG_USER_ONLY) */ 6836 } 6837 6838 /* tlbsx - tlbsx. */ 6839 static void gen_tlbsx_40x(DisasContext *ctx) 6840 { 6841 #if defined(CONFIG_USER_ONLY) 6842 GEN_PRIV; 6843 #else 6844 TCGv t0; 6845 6846 CHK_SV; 6847 t0 = tcg_temp_new(); 6848 gen_addr_reg_index(ctx, t0); 6849 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6850 tcg_temp_free(t0); 6851 if (Rc(ctx->opcode)) { 6852 TCGLabel *l1 = gen_new_label(); 6853 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6854 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6855 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6856 gen_set_label(l1); 6857 } 6858 #endif /* defined(CONFIG_USER_ONLY) */ 6859 } 6860 6861 /* tlbwe */ 6862 static void gen_tlbwe_40x(DisasContext *ctx) 6863 { 6864 #if defined(CONFIG_USER_ONLY) 6865 GEN_PRIV; 6866 #else 6867 CHK_SV; 6868 6869 switch (rB(ctx->opcode)) { 6870 case 0: 6871 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6872 cpu_gpr[rS(ctx->opcode)]); 6873 break; 6874 case 1: 6875 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6876 cpu_gpr[rS(ctx->opcode)]); 6877 break; 6878 default: 6879 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6880 break; 6881 } 6882 #endif /* defined(CONFIG_USER_ONLY) */ 6883 } 6884 6885 /* TLB management - PowerPC 440 implementation */ 6886 6887 /* tlbre */ 6888 static void gen_tlbre_440(DisasContext *ctx) 6889 { 6890 #if defined(CONFIG_USER_ONLY) 6891 GEN_PRIV; 6892 #else 6893 CHK_SV; 6894 6895 switch (rB(ctx->opcode)) { 6896 case 0: 6897 case 1: 6898 case 2: 6899 { 6900 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6901 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6902 t0, cpu_gpr[rA(ctx->opcode)]); 6903 tcg_temp_free_i32(t0); 6904 } 6905 break; 6906 default: 6907 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6908 break; 6909 } 6910 #endif /* defined(CONFIG_USER_ONLY) */ 6911 } 6912 6913 /* tlbsx - tlbsx. 
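With Rc=1, CR0 is seeded from XER[SO] and the EQ bit (0x02) is set unless the search result in rD is -1, i.e. no matching TLB entry was found.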
*/ 6914 static void gen_tlbsx_440(DisasContext *ctx) 6915 { 6916 #if defined(CONFIG_USER_ONLY) 6917 GEN_PRIV; 6918 #else 6919 TCGv t0; 6920 6921 CHK_SV; 6922 t0 = tcg_temp_new(); 6923 gen_addr_reg_index(ctx, t0); 6924 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6925 tcg_temp_free(t0); 6926 if (Rc(ctx->opcode)) { 6927 TCGLabel *l1 = gen_new_label(); 6928 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6929 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6930 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6931 gen_set_label(l1); 6932 } 6933 #endif /* defined(CONFIG_USER_ONLY) */ 6934 } 6935 6936 /* tlbwe */ 6937 static void gen_tlbwe_440(DisasContext *ctx) 6938 { 6939 #if defined(CONFIG_USER_ONLY) 6940 GEN_PRIV; 6941 #else 6942 CHK_SV; 6943 switch (rB(ctx->opcode)) { 6944 case 0: 6945 case 1: 6946 case 2: 6947 { 6948 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6949 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 6950 cpu_gpr[rS(ctx->opcode)]); 6951 tcg_temp_free_i32(t0); 6952 } 6953 break; 6954 default: 6955 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6956 break; 6957 } 6958 #endif /* defined(CONFIG_USER_ONLY) */ 6959 } 6960 6961 /* TLB management - PowerPC BookE 2.06 implementation */ 6962 6963 /* tlbre */ 6964 static void gen_tlbre_booke206(DisasContext *ctx) 6965 { 6966 #if defined(CONFIG_USER_ONLY) 6967 GEN_PRIV; 6968 #else 6969 CHK_SV; 6970 gen_helper_booke206_tlbre(cpu_env); 6971 #endif /* defined(CONFIG_USER_ONLY) */ 6972 } 6973 6974 /* tlbsx - tlbsx. */ 6975 static void gen_tlbsx_booke206(DisasContext *ctx) 6976 { 6977 #if defined(CONFIG_USER_ONLY) 6978 GEN_PRIV; 6979 #else 6980 TCGv t0; 6981 6982 CHK_SV; 6983 if (rA(ctx->opcode)) { 6984 t0 = tcg_temp_new(); 6985 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6986 } else { 6987 t0 = tcg_const_tl(0); 6988 } 6989 6990 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6991 gen_helper_booke206_tlbsx(cpu_env, t0); 6992 tcg_temp_free(t0); 6993 #endif /* defined(CONFIG_USER_ONLY) */ 6994 } 6995 6996 /* tlbwe */ 6997 static void gen_tlbwe_booke206(DisasContext *ctx) 6998 { 6999 #if defined(CONFIG_USER_ONLY) 7000 GEN_PRIV; 7001 #else 7002 CHK_SV; 7003 gen_helper_booke206_tlbwe(cpu_env); 7004 #endif /* defined(CONFIG_USER_ONLY) */ 7005 } 7006 7007 static void gen_tlbivax_booke206(DisasContext *ctx) 7008 { 7009 #if defined(CONFIG_USER_ONLY) 7010 GEN_PRIV; 7011 #else 7012 TCGv t0; 7013 7014 CHK_SV; 7015 t0 = tcg_temp_new(); 7016 gen_addr_reg_index(ctx, t0); 7017 gen_helper_booke206_tlbivax(cpu_env, t0); 7018 tcg_temp_free(t0); 7019 #endif /* defined(CONFIG_USER_ONLY) */ 7020 } 7021 7022 static void gen_tlbilx_booke206(DisasContext *ctx) 7023 { 7024 #if defined(CONFIG_USER_ONLY) 7025 GEN_PRIV; 7026 #else 7027 TCGv t0; 7028 7029 CHK_SV; 7030 t0 = tcg_temp_new(); 7031 gen_addr_reg_index(ctx, t0); 7032 7033 switch ((ctx->opcode >> 21) & 0x3) { 7034 case 0: 7035 gen_helper_booke206_tlbilx0(cpu_env, t0); 7036 break; 7037 case 1: 7038 gen_helper_booke206_tlbilx1(cpu_env, t0); 7039 break; 7040 case 3: 7041 gen_helper_booke206_tlbilx3(cpu_env, t0); 7042 break; 7043 default: 7044 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7045 break; 7046 } 7047 7048 tcg_temp_free(t0); 7049 #endif /* defined(CONFIG_USER_ONLY) */ 7050 } 7051 7052 7053 /* wrtee */ 7054 static void gen_wrtee(DisasContext *ctx) 7055 { 7056 #if defined(CONFIG_USER_ONLY) 7057 GEN_PRIV; 7058 #else 7059 TCGv t0; 7060 7061 CHK_SV; 7062 t0 = tcg_temp_new(); 7063 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 7064 
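/* Insert only the EE bit taken from rD into MSR; all other MSR bits are preserved. */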
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7065 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 7066 tcg_temp_free(t0); 7067 /* 7068 * Stop translation to have a chance to raise an exception if we 7069 * just set msr_ee to 1 7070 */ 7071 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7072 #endif /* defined(CONFIG_USER_ONLY) */ 7073 } 7074 7075 /* wrteei */ 7076 static void gen_wrteei(DisasContext *ctx) 7077 { 7078 #if defined(CONFIG_USER_ONLY) 7079 GEN_PRIV; 7080 #else 7081 CHK_SV; 7082 if (ctx->opcode & 0x00008000) { 7083 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 7084 /* Stop translation to have a chance to raise an exception */ 7085 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7086 } else { 7087 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7088 } 7089 #endif /* defined(CONFIG_USER_ONLY) */ 7090 } 7091 7092 /* PowerPC 440 specific instructions */ 7093 7094 /* dlmzb */ 7095 static void gen_dlmzb(DisasContext *ctx) 7096 { 7097 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 7098 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 7099 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 7100 tcg_temp_free_i32(t0); 7101 } 7102 7103 /* mbar replaces eieio on 440 */ 7104 static void gen_mbar(DisasContext *ctx) 7105 { 7106 /* interpreted as no-op */ 7107 } 7108 7109 /* msync replaces sync on 440 */ 7110 static void gen_msync_4xx(DisasContext *ctx) 7111 { 7112 /* Only e500 seems to treat reserved bits as invalid */ 7113 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 7114 (ctx->opcode & 0x03FFF801)) { 7115 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7116 } 7117 /* otherwise interpreted as no-op */ 7118 } 7119 7120 /* icbt */ 7121 static void gen_icbt_440(DisasContext *ctx) 7122 { 7123 /* 7124 * interpreted as no-op 7125 * XXX: specification say this is treated as a load by the MMU but 7126 * does not generate any exception 7127 */ 7128 } 7129 7130 /* Embedded.Processor Control */ 7131 7132 static void gen_msgclr(DisasContext *ctx) 7133 { 7134 #if defined(CONFIG_USER_ONLY) 7135 GEN_PRIV; 7136 #else 7137 CHK_HV; 7138 if (is_book3s_arch2x(ctx)) { 7139 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7140 } else { 7141 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7142 } 7143 #endif /* defined(CONFIG_USER_ONLY) */ 7144 } 7145 7146 static void gen_msgsnd(DisasContext *ctx) 7147 { 7148 #if defined(CONFIG_USER_ONLY) 7149 GEN_PRIV; 7150 #else 7151 CHK_HV; 7152 if (is_book3s_arch2x(ctx)) { 7153 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7154 } else { 7155 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7156 } 7157 #endif /* defined(CONFIG_USER_ONLY) */ 7158 } 7159 7160 #if defined(TARGET_PPC64) 7161 static void gen_msgclrp(DisasContext *ctx) 7162 { 7163 #if defined(CONFIG_USER_ONLY) 7164 GEN_PRIV; 7165 #else 7166 CHK_SV; 7167 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7168 #endif /* defined(CONFIG_USER_ONLY) */ 7169 } 7170 7171 static void gen_msgsndp(DisasContext *ctx) 7172 { 7173 #if defined(CONFIG_USER_ONLY) 7174 GEN_PRIV; 7175 #else 7176 CHK_SV; 7177 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7178 #endif /* defined(CONFIG_USER_ONLY) */ 7179 } 7180 #endif 7181 7182 static void gen_msgsync(DisasContext *ctx) 7183 { 7184 #if defined(CONFIG_USER_ONLY) 7185 GEN_PRIV; 7186 #else 7187 CHK_HV; 7188 #endif /* defined(CONFIG_USER_ONLY) */ 7189 /* interpreted as no-op */ 7190 } 7191 7192 #if defined(TARGET_PPC64) 7193 static void gen_maddld(DisasContext *ctx) 7194 { 7195 TCGv_i64 t1 = tcg_temp_new_i64(); 7196 7197 tcg_gen_mul_i64(t1, 
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 7198 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 7199 tcg_temp_free_i64(t1); 7200 } 7201 7202 /* maddhd maddhdu */ 7203 static void gen_maddhd_maddhdu(DisasContext *ctx) 7204 { 7205 TCGv_i64 lo = tcg_temp_new_i64(); 7206 TCGv_i64 hi = tcg_temp_new_i64(); 7207 TCGv_i64 t1 = tcg_temp_new_i64(); 7208 7209 if (Rc(ctx->opcode)) { 7210 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7211 cpu_gpr[rB(ctx->opcode)]); 7212 tcg_gen_movi_i64(t1, 0); 7213 } else { 7214 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7215 cpu_gpr[rB(ctx->opcode)]); 7216 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 7217 } 7218 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 7219 cpu_gpr[rC(ctx->opcode)], t1); 7220 tcg_temp_free_i64(lo); 7221 tcg_temp_free_i64(hi); 7222 tcg_temp_free_i64(t1); 7223 } 7224 #endif /* defined(TARGET_PPC64) */ 7225 7226 static void gen_tbegin(DisasContext *ctx) 7227 { 7228 if (unlikely(!ctx->tm_enabled)) { 7229 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7230 return; 7231 } 7232 gen_helper_tbegin(cpu_env); 7233 } 7234 7235 #define GEN_TM_NOOP(name) \ 7236 static inline void gen_##name(DisasContext *ctx) \ 7237 { \ 7238 if (unlikely(!ctx->tm_enabled)) { \ 7239 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7240 return; \ 7241 } \ 7242 /* \ 7243 * Because tbegin always fails in QEMU, these user \ 7244 * space instructions all have a simple implementation: \ 7245 * \ 7246 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7247 * = 0b0 || 0b00 || 0b0 \ 7248 */ \ 7249 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7250 } 7251 7252 GEN_TM_NOOP(tend); 7253 GEN_TM_NOOP(tabort); 7254 GEN_TM_NOOP(tabortwc); 7255 GEN_TM_NOOP(tabortwci); 7256 GEN_TM_NOOP(tabortdc); 7257 GEN_TM_NOOP(tabortdci); 7258 GEN_TM_NOOP(tsr); 7259 7260 static inline void gen_cp_abort(DisasContext *ctx) 7261 { 7262 /* Do Nothing */ 7263 } 7264 7265 #define GEN_CP_PASTE_NOOP(name) \ 7266 static inline void gen_##name(DisasContext *ctx) \ 7267 { \ 7268 /* \ 7269 * Generate invalid exception until we have an \ 7270 * implementation of the copy paste facility \ 7271 */ \ 7272 gen_invalid(ctx); \ 7273 } 7274 7275 GEN_CP_PASTE_NOOP(copy) 7276 GEN_CP_PASTE_NOOP(paste) 7277 7278 static void gen_tcheck(DisasContext *ctx) 7279 { 7280 if (unlikely(!ctx->tm_enabled)) { 7281 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7282 return; 7283 } 7284 /* 7285 * Because tbegin always fails, the tcheck implementation is 7286 * simple: 7287 * 7288 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 7289 * = 0b1 || 0b00 || 0b0 7290 */ 7291 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 7292 } 7293 7294 #if defined(CONFIG_USER_ONLY) 7295 #define GEN_TM_PRIV_NOOP(name) \ 7296 static inline void gen_##name(DisasContext *ctx) \ 7297 { \ 7298 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 7299 } 7300 7301 #else 7302 7303 #define GEN_TM_PRIV_NOOP(name) \ 7304 static inline void gen_##name(DisasContext *ctx) \ 7305 { \ 7306 CHK_SV; \ 7307 if (unlikely(!ctx->tm_enabled)) { \ 7308 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7309 return; \ 7310 } \ 7311 /* \ 7312 * Because tbegin always fails, the implementation is \ 7313 * simple: \ 7314 * \ 7315 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7316 * = 0b0 || 0b00 | 0b0 \ 7317 */ \ 7318 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7319 } 7320 7321 #endif 7322 7323 GEN_TM_PRIV_NOOP(treclaim); 7324 GEN_TM_PRIV_NOOP(trechkpt); 7325 7326 static inline void get_fpr(TCGv_i64 dst, int regno) 7327 { 7328 tcg_gen_ld_i64(dst, cpu_env, 
fpr_offset(regno)); 7329 } 7330 7331 static inline void set_fpr(int regno, TCGv_i64 src) 7332 { 7333 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno)); 7334 } 7335 7336 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 7337 { 7338 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high)); 7339 } 7340 7341 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 7342 { 7343 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high)); 7344 } 7345 7346 /* 7347 * Helpers for decodetree used by !function for decoding arguments. 7348 */ 7349 static int times_2(DisasContext *ctx, int x) 7350 { 7351 return x * 2; 7352 } 7353 7354 static int times_4(DisasContext *ctx, int x) 7355 { 7356 return x * 4; 7357 } 7358 7359 static int times_16(DisasContext *ctx, int x) 7360 { 7361 return x * 16; 7362 } 7363 7364 /* 7365 * Helpers for trans_* functions to check for specific insns flags. 7366 * Use token pasting to ensure that we use the proper flag with the 7367 * proper variable. 7368 */ 7369 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \ 7370 do { \ 7371 if (((CTX)->insns_flags & PPC_##NAME) == 0) { \ 7372 return false; \ 7373 } \ 7374 } while (0) 7375 7376 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \ 7377 do { \ 7378 if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \ 7379 return false; \ 7380 } \ 7381 } while (0) 7382 7383 /* Then special-case the check for 64-bit so that we elide code for ppc32. */ 7384 #if TARGET_LONG_BITS == 32 7385 # define REQUIRE_64BIT(CTX) return false 7386 #else 7387 # define REQUIRE_64BIT(CTX) REQUIRE_INSNS_FLAGS(CTX, 64B) 7388 #endif 7389 7390 #define REQUIRE_VECTOR(CTX) \ 7391 do { \ 7392 if (unlikely(!(CTX)->altivec_enabled)) { \ 7393 gen_exception((CTX), POWERPC_EXCP_VPU); \ 7394 return true; \ 7395 } \ 7396 } while (0) 7397 7398 #define REQUIRE_VSX(CTX) \ 7399 do { \ 7400 if (unlikely(!(CTX)->vsx_enabled)) { \ 7401 gen_exception((CTX), POWERPC_EXCP_VSXU); \ 7402 return true; \ 7403 } \ 7404 } while (0) 7405 7406 #define REQUIRE_FPU(ctx) \ 7407 do { \ 7408 if (unlikely(!(ctx)->fpu_enabled)) { \ 7409 gen_exception((ctx), POWERPC_EXCP_FPU); \ 7410 return true; \ 7411 } \ 7412 } while (0) 7413 7414 /* 7415 * Helpers for implementing sets of trans_* functions. 7416 * Defer the implementation of NAME to FUNC, with optional extra arguments. 7417 */ 7418 #define TRANS(NAME, FUNC, ...) \ 7419 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7420 { return FUNC(ctx, a, __VA_ARGS__); } 7421 7422 #define TRANS64(NAME, FUNC, ...) \ 7423 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7424 { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); } 7425 7426 /* TODO: More TRANS* helpers for extra insn_flags checks. */ 7427 7428 7429 #include "decode-insn32.c.inc" 7430 #include "decode-insn64.c.inc" 7431 #include "power8-pmu-regs.c.inc" 7432 7433 /* 7434 * Incorporate CIA into the constant when R=1. 7435 * Validate that when R=1, RA=0. 
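 * With R=1 the displacement is PC-relative: si is biased by the current
 * instruction address (cia), e.g. si=0x1000 at cia=0x2000 yields 0x3000.
 * RA must be 0 in that form; anything else is treated as invalid.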
7436 */ 7437 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a) 7438 { 7439 d->rt = a->rt; 7440 d->ra = a->ra; 7441 d->si = a->si; 7442 if (a->r) { 7443 if (unlikely(a->ra != 0)) { 7444 gen_invalid(ctx); 7445 return false; 7446 } 7447 d->si += ctx->cia; 7448 } 7449 return true; 7450 } 7451 7452 #include "translate/fixedpoint-impl.c.inc" 7453 7454 #include "translate/fp-impl.c.inc" 7455 7456 #include "translate/vmx-impl.c.inc" 7457 7458 #include "translate/vsx-impl.c.inc" 7459 7460 #include "translate/dfp-impl.c.inc" 7461 7462 #include "translate/spe-impl.c.inc" 7463 7464 #include "translate/branch-impl.c.inc" 7465 7466 /* Handles lfdp, lxsd, lxssp */ 7467 static void gen_dform39(DisasContext *ctx) 7468 { 7469 switch (ctx->opcode & 0x3) { 7470 case 0: /* lfdp */ 7471 if (ctx->insns_flags2 & PPC2_ISA205) { 7472 return gen_lfdp(ctx); 7473 } 7474 break; 7475 case 2: /* lxsd */ 7476 if (ctx->insns_flags2 & PPC2_ISA300) { 7477 return gen_lxsd(ctx); 7478 } 7479 break; 7480 case 3: /* lxssp */ 7481 if (ctx->insns_flags2 & PPC2_ISA300) { 7482 return gen_lxssp(ctx); 7483 } 7484 break; 7485 } 7486 return gen_invalid(ctx); 7487 } 7488 7489 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 7490 static void gen_dform3D(DisasContext *ctx) 7491 { 7492 if ((ctx->opcode & 3) != 1) { /* DS-FORM */ 7493 switch (ctx->opcode & 0x3) { 7494 case 0: /* stfdp */ 7495 if (ctx->insns_flags2 & PPC2_ISA205) { 7496 return gen_stfdp(ctx); 7497 } 7498 break; 7499 case 2: /* stxsd */ 7500 if (ctx->insns_flags2 & PPC2_ISA300) { 7501 return gen_stxsd(ctx); 7502 } 7503 break; 7504 case 3: /* stxssp */ 7505 if (ctx->insns_flags2 & PPC2_ISA300) { 7506 return gen_stxssp(ctx); 7507 } 7508 break; 7509 } 7510 } 7511 return gen_invalid(ctx); 7512 } 7513 7514 #if defined(TARGET_PPC64) 7515 /* brd */ 7516 static void gen_brd(DisasContext *ctx) 7517 { 7518 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7519 } 7520 7521 /* brw */ 7522 static void gen_brw(DisasContext *ctx) 7523 { 7524 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7525 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32); 7526 7527 } 7528 7529 /* brh */ 7530 static void gen_brh(DisasContext *ctx) 7531 { 7532 TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull); 7533 TCGv_i64 t1 = tcg_temp_new_i64(); 7534 TCGv_i64 t2 = tcg_temp_new_i64(); 7535 7536 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8); 7537 tcg_gen_and_i64(t2, t1, mask); 7538 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask); 7539 tcg_gen_shli_i64(t1, t1, 8); 7540 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2); 7541 7542 tcg_temp_free_i64(t1); 7543 tcg_temp_free_i64(t2); 7544 } 7545 #endif 7546 7547 static opcode_t opcodes[] = { 7548 #if defined(TARGET_PPC64) 7549 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310), 7550 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310), 7551 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310), 7552 #endif 7553 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 7554 #if defined(TARGET_PPC64) 7555 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 7556 #endif 7557 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 7558 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 7559 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 7560 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7561 GEN_HANDLER2(addic_, "addic.", 0x0D, 
0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7562 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 7563 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 7564 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 7565 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 7566 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7567 #if defined(TARGET_PPC64) 7568 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 7569 #endif 7570 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 7571 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 7572 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7573 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7574 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7575 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 7576 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 7577 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 7578 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7579 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 7580 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 7581 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 7582 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7583 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7584 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7585 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7586 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 7587 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 7588 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 7589 #if defined(TARGET_PPC64) 7590 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 7591 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 7592 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 7593 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 7594 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 7595 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 7596 #endif 7597 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7598 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7599 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7600 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 7601 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 7602 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 7603 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 7604 #if defined(TARGET_PPC64) 7605 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 7606 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 7607 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 7608 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 7609 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 7610 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 7611 PPC_NONE, PPC2_ISA300), 7612 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 7613 PPC_NONE, PPC2_ISA300), 7614 #endif 7615 /* handles lfdp, lxsd, lxssp */ 7616 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7617 /* 
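like dform39 above, dispatched on the two low-order opcode bits;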
handles stfdp, stxsd, stxssp */ 7618 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7619 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7620 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7621 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 7622 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 7623 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 7624 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 7625 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 7626 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 7627 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7628 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7629 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 7630 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 7631 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 7632 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7633 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7634 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 7635 #if defined(TARGET_PPC64) 7636 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 7637 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 7638 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 7639 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 7640 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 7641 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 7642 #endif 7643 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 7644 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 7645 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 7646 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7647 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7648 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 7649 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 7650 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 7651 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 7652 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 7653 #if defined(TARGET_PPC64) 7654 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 7655 #if !defined(CONFIG_USER_ONLY) 7656 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7657 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7658 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7659 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300), 7660 #endif 7661 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7662 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7663 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7664 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7665 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7666 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 7667 #endif 7668 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7669 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW), 7670 GEN_HANDLER(sc, 0x11, 0x01, 
0xFF, 0x03FFF01D, PPC_FLOW), 7671 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 7672 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7673 #if defined(TARGET_PPC64) 7674 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 7675 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 7676 #endif 7677 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 7678 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 7679 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 7680 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 7681 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 7682 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 7683 #if defined(TARGET_PPC64) 7684 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 7685 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 7686 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 7687 #endif 7688 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 7689 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 7690 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 7691 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7692 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 7693 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 7694 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7695 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 7696 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7697 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 7698 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7699 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 7700 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 7701 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7702 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 7703 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 7704 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 7705 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 7706 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7707 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 7708 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 7709 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 7710 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 7711 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 7712 #if defined(TARGET_PPC64) 7713 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 7714 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 7715 PPC_SEGMENT_64B), 7716 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 7717 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 7718 PPC_SEGMENT_64B), 7719 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 7720 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 7721 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 7722 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 7723 #endif 7724 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 7725 /* 7726 * XXX Those instructions will 
need to be handled differently for 7727 * different ISA versions 7728 */ 7729 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 7730 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 7731 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 7732 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 7733 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 7734 #if defined(TARGET_PPC64) 7735 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 7736 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 7737 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 7738 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7739 #endif 7740 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 7741 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 7742 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 7743 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 7744 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 7745 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 7746 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 7747 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 7748 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 7749 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 7750 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 7751 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7752 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 7753 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 7754 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 7755 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 7756 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 7757 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 7758 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 7759 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7760 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 7761 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 7762 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 7763 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 7764 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 7765 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 7766 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 7767 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 7768 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 7769 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 7770 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 7771 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 7772 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 7773 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 7774 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 7775 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 7776 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 7777 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 7778 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 7779 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 7780 GEN_HANDLER2(tlbli_6xx, "tlbli", 
0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 7781 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7782 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7783 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7784 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7785 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7786 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7787 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7788 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7789 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7790 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7791 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7792 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7793 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7794 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7795 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7796 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7797 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7798 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7799 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7800 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7801 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7802 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7803 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7804 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7805 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7806 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7807 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7808 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7809 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7810 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7811 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7812 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7813 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7814 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7815 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7816 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7817 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7818 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7819 PPC_NONE, PPC2_BOOKE206), 7820 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7821 PPC_NONE, PPC2_BOOKE206), 7822 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7823 PPC_NONE, PPC2_BOOKE206), 7824 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7825 PPC_NONE, PPC2_BOOKE206), 7826 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7827 PPC_NONE, PPC2_BOOKE206), 7828 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7829 PPC_NONE, PPC2_PRCNTL), 7830 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7831 PPC_NONE, PPC2_PRCNTL), 7832 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7833 PPC_NONE, PPC2_PRCNTL), 7834 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7835 
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
               PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
             PPC_440_SPEC),
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
              PPC2_ISA300),
GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
               PPC_NONE, PPC2_ISA207S),
GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
               PPC_NONE, PPC2_ISA207S),
#endif

#undef GEN_INT_ARITH_ADD
#undef GEN_INT_ARITH_ADD_CONST
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \
                                add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)

#undef GEN_INT_ARITH_DIVW
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#if defined(TARGET_PPC64)
#undef GEN_INT_ARITH_DIVD
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),

GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#undef GEN_INT_ARITH_MUL_HELPER
#define GEN_INT_ARITH_MUL_HELPER(name, opc3) \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
#endif

#undef GEN_INT_ARITH_SUBF
#undef GEN_INT_ARITH_SUBF_CONST
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \
                                add_ca, compute_ca, compute_ov) \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

#undef GEN_LOGICAL1
#undef GEN_LOGICAL2
#define GEN_LOGICAL2(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
#define GEN_LOGICAL1(name, tcg_op, opc, type) \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif

#if defined(TARGET_PPC64)
#undef GEN_PPC64_R2
#undef GEN_PPC64_R4
#define GEN_PPC64_R2(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2) \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \
             PPC_64B), \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \
             PPC_64B), \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \
             PPC_64B)
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
GEN_PPC64_R4(rldic, 0x1E, 0x04),
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
#endif

#undef GEN_LDX_E
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),

#if defined(TARGET_PPC64)
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)

/* HV/P7 and later only */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)

/* External PID based load */
#undef GEN_LDEPX
#define GEN_LDEPX(name, ldop, opc2, opc3) \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_Q), 0x1D, 0x00)
#endif

#undef GEN_STX_E
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),

#if defined(TARGET_PPC64)
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)

#undef GEN_STEPX
#define GEN_STEPX(name, ldop, opc2, opc3) \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
#if defined(TARGET_PPC64)
GEN_STEPX(std, DEF_MEMOP(MO_Q), 0x1D, 0x04)
#endif

#undef GEN_CRLOGIC
#define GEN_CRLOGIC(name, tcg_op, opc) \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),

#undef GEN_MAC_HANDLER
#define GEN_MAC_HANDLER(name, opc2, opc3) \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),

GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
               PPC_NONE, PPC2_TM),

#include "translate/fp-ops.c.inc"

#include "translate/vmx-ops.c.inc"

#include "translate/vsx-ops.c.inc"

#include "translate/spe-ops.c.inc"
};

/*****************************************************************************/
/* Opcode types */
enum {
    PPC_DIRECT   = 0, /* Opcode routine        */
    PPC_INDIRECT = 1, /* Indirect opcode table */
};

#define PPC_OPCODE_MASK 0x3

static inline int is_indirect_opcode(void *handler)
{
    return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
}

static inline opc_handler_t **ind_table(void *handler)
{
    return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
}

/* Instruction table creation */
/* Opcodes tables creation */
static void fill_new_table(opc_handler_t **table, int len)
{
    int i;

    for (i = 0; i < len; i++) {
        table[i] = &invalid_handler;
    }
}

static int create_new_table(opc_handler_t **table, unsigned char idx)
{
    opc_handler_t **tmp;

    tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
    fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
    table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);

    return 0;
}

static int insert_in_table(opc_handler_t **table, unsigned char idx,
                           opc_handler_t *handler)
{
    if (table[idx] != &invalid_handler) {
        return -1;
    }
    table[idx] = handler;

    return 0;
}
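
/*
 * Note on the table encoding used by the helpers above: an entry is either
 * a direct pointer to an opc_handler_t, or a pointer to a sub-table
 * (allocated with g_new()) whose low bit carries the PPC_INDIRECT tag.
 * For example, create_new_table() stores
 *
 *     table[idx] = (opc_handler_t *)((uintptr_t)sub_table | PPC_INDIRECT);
 *
 * is_indirect_opcode() tests that tag and ind_table() masks it off again.
 * This relies on heap pointers being aligned to more than PPC_OPCODE_MASK
 * bytes, so the low bits are free to hold the tag.
 */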

static int register_direct_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx, opc_handler_t *handler)
{
    if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in main "
               "opcode table\n", idx);
        return -1;
    }

    return 0;
}

static int register_ind_in_table(opc_handler_t **table,
                                 unsigned char idx1, unsigned char idx2,
                                 opc_handler_t *handler)
{
    if (table[idx1] == &invalid_handler) {
        if (create_new_table(table, idx1) < 0) {
            printf("*** ERROR: unable to create indirect table "
                   "idx=%02x\n", idx1);
            return -1;
        }
    } else {
        if (!is_indirect_opcode(table[idx1])) {
            printf("*** ERROR: idx %02x already assigned to a direct "
                   "opcode\n", idx1);
            return -1;
        }
    }
    if (handler != NULL &&
        insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in "
               "opcode table %02x\n", idx2, idx1);
        return -1;
    }

    return 0;
}

static int register_ind_insn(opc_handler_t **ppc_opcodes,
                             unsigned char idx1, unsigned char idx2,
                             opc_handler_t *handler)
{
    return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
}

static int register_dblind_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx1, unsigned char idx2,
                                unsigned char idx3, opc_handler_t *handler)
{
    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
                              handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }

    return 0;
}

static int register_trplind_insn(opc_handler_t **ppc_opcodes,
                                 unsigned char idx1, unsigned char idx2,
                                 unsigned char idx3, unsigned char idx4,
                                 opc_handler_t *handler)
{
    opc_handler_t **table;

    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    table = ind_table(ppc_opcodes[idx1]);
    if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
        printf("*** ERROR: unable to join 2nd-level indirect table idx "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }
    table = ind_table(table[idx2]);
    if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
        return -1;
    }
    return 0;
}

static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
{
    if (insn->opc2 != 0xFF) {
        if (insn->opc3 != 0xFF) {
            if (insn->opc4 != 0xFF) {
                if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
                                          insn->opc3, insn->opc4,
                                          &insn->handler) < 0) {
                    return -1;
                }
            } else {
                if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
                                         insn->opc3, &insn->handler) < 0) {
                    return -1;
                }
            }
        } else {
            if (register_ind_insn(ppc_opcodes, insn->opc1,
                                  insn->opc2, &insn->handler) < 0) {
                return -1;
            }
        }
    } else {
        if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
            return -1;
        }
    }

    return 0;
}
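
/*
 * Worked example of the registration logic above (illustrative only,
 * assuming the usual GEN_HANDLER expansion, which leaves opc4 at 0xFF):
 * the row GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, ...) has opc2 and opc3
 * set, so register_insn() takes the register_dblind_insn() path and the
 * handler ends up in cpu->opcodes[0x1F] -> sub-table[0x16] -> slot 0x11.
 * A row with opc2 == 0xFF, such as GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, ...),
 * is stored directly in cpu->opcodes[0x09].
 */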

static int test_opcode_table(opc_handler_t **table, int len)
{
    int i, count, tmp;

    for (i = 0, count = 0; i < len; i++) {
        /* Consistency fixup */
        if (table[i] == NULL) {
            table[i] = &invalid_handler;
        }
        if (table[i] != &invalid_handler) {
            if (is_indirect_opcode(table[i])) {
                tmp = test_opcode_table(ind_table(table[i]),
                                        PPC_CPU_INDIRECT_OPCODES_LEN);
                if (tmp == 0) {
                    /*
                     * The sub-table is empty: release it.  It was allocated
                     * with g_new() and is stored with the PPC_INDIRECT tag,
                     * so untag it before freeing.
                     */
                    g_free(ind_table(table[i]));
                    table[i] = &invalid_handler;
                } else {
                    count++;
                }
            } else {
                count++;
            }
        }
    }

    return count;
}

static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
{
    if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
        printf("*** WARNING: no opcode defined !\n");
    }
}

/*****************************************************************************/
void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
{
    PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
    opcode_t *opc;

    fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
    for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
        if (((opc->handler.type & pcc->insns_flags) != 0) ||
            ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
            if (register_insn(cpu->opcodes, opc) < 0) {
                error_setg(errp, "ERROR initializing PowerPC instruction "
                           "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
                           opc->opc3);
                return;
            }
        }
    }
    fix_opcode_tables(cpu->opcodes);
    fflush(stdout);
    fflush(stderr);
}

void destroy_ppc_opcodes(PowerPCCPU *cpu)
{
    opc_handler_t **table, **table_2;
    int i, j, k;

    for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
        if (cpu->opcodes[i] == &invalid_handler) {
            continue;
        }
        if (is_indirect_opcode(cpu->opcodes[i])) {
            table = ind_table(cpu->opcodes[i]);
            for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
                if (table[j] == &invalid_handler) {
                    continue;
                }
                if (is_indirect_opcode(table[j])) {
                    table_2 = ind_table(table[j]);
                    for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
                        if (table_2[k] != &invalid_handler &&
                            is_indirect_opcode(table_2[k])) {
                            g_free((opc_handler_t *)((uintptr_t)table_2[k] &
                                                     ~PPC_INDIRECT));
                        }
                    }
                    g_free((opc_handler_t *)((uintptr_t)table[j] &
                                             ~PPC_INDIRECT));
                }
            }
            g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
                                     ~PPC_INDIRECT));
        }
    }
}

int ppc_fixup_cpu(PowerPCCPU *cpu)
{
    CPUPPCState *env = &cpu->env;

    /*
     * TCG doesn't (yet) emulate some groups of instructions that are
     * implemented on some otherwise supported CPUs (e.g. VSX and
     * decimal floating point instructions on POWER7).  We remove
     * unsupported instruction groups from the cpu state's instruction
     * masks and hope the guest can cope.  For at least the pseries
     * machine, the unavailability of these instructions can be
     * advertised to the guest via the device tree.
     */
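    /*
     * Sketch of the effect (hypothetical flag): if a CPU model sets a
     * PPC2_* bit in insns_flags2 that PPC_TCG_INSNS2 lacks, the warning
     * below fires once and the bit is cleared, so translation-time checks
     * against ctx->insns_flags2 (copied from env in
     * ppc_tr_init_disas_context()) treat that instruction group as absent.
     */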
    if ((env->insns_flags & ~PPC_TCG_INSNS)
        || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
        warn_report("Disabling some instructions which are not "
                    "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
                    env->insns_flags & ~PPC_TCG_INSNS,
                    env->insns_flags2 & ~PPC_TCG_INSNS2);
    }
    env->insns_flags &= PPC_TCG_INSNS;
    env->insns_flags2 &= PPC_TCG_INSNS2;
    return 0;
}

static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
{
    opc_handler_t **table, *handler;
    uint32_t inval;

    ctx->opcode = insn;

    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
              ctx->le_mode ? "little" : "big");

    table = cpu->opcodes;
    handler = table[opc1(insn)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(insn)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(insn)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(insn)];
            }
        }
    }

    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n",
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                 && Rc(insn))) {
        inval = handler->inval2;
    } else {
        inval = handler->inval1;
    }

    if (unlikely((insn & inval) != 0)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n", insn & inval,
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    handler->handler(ctx);
    return true;
}

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    uint32_t hflags = ctx->base.tb->flags;

    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || env->mmu_model & POWERPC_MMU_64;

    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
    ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;

    ctx->singlestep_enabled = 0;
    if ((hflags >> HFLAGS_SE) & 1) {
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}

static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    DisasJumpType is_jmp = ctx->base.is_jmp;
    target_ulong nip = ctx->base.pc_next;

    if (is_jmp == DISAS_NORETURN) {
        /* We have already exited the TB. */
        return;
    }
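
    /*
     * Everything below emits the code that actually ends the TB.  When
     * single-stepping, the exit is normally converted into a debug
     * exception (the nip range check below skips this for addresses in
     * the exception vector area).  Otherwise, DISAS_TOO_MANY chains
     * directly to the next TB with goto_tb when the target is reachable,
     * the CHAIN cases jump through tcg_gen_lookup_and_goto_ptr(), and the
     * EXIT cases return to the main loop via tcg_gen_exit_tb().
     */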

    /* Honor single stepping. */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
        && (nip <= 0x100 || nip > 0xf00)) {
        switch (is_jmp) {
        case DISAS_TOO_MANY:
        case DISAS_EXIT_UPDATE:
        case DISAS_CHAIN_UPDATE:
            gen_update_nip(ctx, nip);
            break;
        case DISAS_EXIT:
        case DISAS_CHAIN:
            break;
        default:
            g_assert_not_reached();
        }

        gen_debug_exception(ctx);
        return;
    }

    switch (is_jmp) {
    case DISAS_TOO_MANY:
        if (use_goto_tb(ctx, nip)) {
            pmu_count_insns(ctx);
            tcg_gen_goto_tb(0);
            gen_update_nip(ctx, nip);
            tcg_gen_exit_tb(ctx->base.tb, 0);
            break;
        }
        /* fall through */
    case DISAS_CHAIN_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_CHAIN:
        /*
         * tcg_gen_lookup_and_goto_ptr will exit the TB if
         * CF_NO_GOTO_PTR is set. Count insns now.
         */
        if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
            pmu_count_insns(ctx);
        }

        tcg_gen_lookup_and_goto_ptr();
        break;

    case DISAS_EXIT_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_EXIT:
        pmu_count_insns(ctx);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }
}

static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
}

/*
 * The data[] values consumed here are the ones recorded by
 * tcg_gen_insn_start() in ppc_tr_insn_start(): data[0] is the guest
 * address of the instruction being restarted.
 */
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}