1 /* 2 * PowerPC emulation for qemu: main translation routines. 3 * 4 * Copyright (c) 2003-2007 Jocelyn Mayer 5 * Copyright (C) 2011 Freescale Semiconductor, Inc. 6 * 7 * This library is free software; you can redistribute it and/or 8 * modify it under the terms of the GNU Lesser General Public 9 * License as published by the Free Software Foundation; either 10 * version 2.1 of the License, or (at your option) any later version. 11 * 12 * This library is distributed in the hope that it will be useful, 13 * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 * Lesser General Public License for more details. 16 * 17 * You should have received a copy of the GNU Lesser General Public 18 * License along with this library; if not, see <http://www.gnu.org/licenses/>. 19 */ 20 21 #include "qemu/osdep.h" 22 #include "cpu.h" 23 #include "internal.h" 24 #include "disas/disas.h" 25 #include "exec/exec-all.h" 26 #include "tcg/tcg-op.h" 27 #include "tcg/tcg-op-gvec.h" 28 #include "qemu/host-utils.h" 29 #include "qemu/main-loop.h" 30 #include "exec/cpu_ldst.h" 31 32 #include "exec/helper-proto.h" 33 #include "exec/helper-gen.h" 34 35 #include "exec/translator.h" 36 #include "exec/log.h" 37 #include "qemu/atomic128.h" 38 #include "spr_tcg.h" 39 40 #include "qemu/qemu-print.h" 41 #include "qapi/error.h" 42 43 #define CPU_SINGLE_STEP 0x1 44 #define CPU_BRANCH_STEP 0x2 45 46 /* Include definitions for instructions classes and implementations flags */ 47 /* #define PPC_DEBUG_DISAS */ 48 49 #ifdef PPC_DEBUG_DISAS 50 # define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__) 51 #else 52 # define LOG_DISAS(...) 
do { } while (0) 53 #endif 54 /*****************************************************************************/ 55 /* Code translation helpers */ 56 57 /* global register indexes */ 58 static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */ 59 + 10 * 4 + 22 * 5 /* SPE GPRh */ 60 + 8 * 5 /* CRF */]; 61 static TCGv cpu_gpr[32]; 62 static TCGv cpu_gprh[32]; 63 static TCGv_i32 cpu_crf[8]; 64 static TCGv cpu_nip; 65 static TCGv cpu_msr; 66 static TCGv cpu_ctr; 67 static TCGv cpu_lr; 68 #if defined(TARGET_PPC64) 69 static TCGv cpu_cfar; 70 #endif 71 static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32; 72 static TCGv cpu_reserve; 73 static TCGv cpu_reserve_val; 74 static TCGv cpu_fpscr; 75 static TCGv_i32 cpu_access_type; 76 77 #include "exec/gen-icount.h" 78 79 void ppc_translate_init(void) 80 { 81 int i; 82 char *p; 83 size_t cpu_reg_names_size; 84 85 p = cpu_reg_names; 86 cpu_reg_names_size = sizeof(cpu_reg_names); 87 88 for (i = 0; i < 8; i++) { 89 snprintf(p, cpu_reg_names_size, "crf%d", i); 90 cpu_crf[i] = tcg_global_mem_new_i32(cpu_env, 91 offsetof(CPUPPCState, crf[i]), p); 92 p += 5; 93 cpu_reg_names_size -= 5; 94 } 95 96 for (i = 0; i < 32; i++) { 97 snprintf(p, cpu_reg_names_size, "r%d", i); 98 cpu_gpr[i] = tcg_global_mem_new(cpu_env, 99 offsetof(CPUPPCState, gpr[i]), p); 100 p += (i < 10) ? 3 : 4; 101 cpu_reg_names_size -= (i < 10) ? 3 : 4; 102 snprintf(p, cpu_reg_names_size, "r%dH", i); 103 cpu_gprh[i] = tcg_global_mem_new(cpu_env, 104 offsetof(CPUPPCState, gprh[i]), p); 105 p += (i < 10) ? 4 : 5; 106 cpu_reg_names_size -= (i < 10) ? 
4 : 5; 107 } 108 109 cpu_nip = tcg_global_mem_new(cpu_env, 110 offsetof(CPUPPCState, nip), "nip"); 111 112 cpu_msr = tcg_global_mem_new(cpu_env, 113 offsetof(CPUPPCState, msr), "msr"); 114 115 cpu_ctr = tcg_global_mem_new(cpu_env, 116 offsetof(CPUPPCState, ctr), "ctr"); 117 118 cpu_lr = tcg_global_mem_new(cpu_env, 119 offsetof(CPUPPCState, lr), "lr"); 120 121 #if defined(TARGET_PPC64) 122 cpu_cfar = tcg_global_mem_new(cpu_env, 123 offsetof(CPUPPCState, cfar), "cfar"); 124 #endif 125 126 cpu_xer = tcg_global_mem_new(cpu_env, 127 offsetof(CPUPPCState, xer), "xer"); 128 cpu_so = tcg_global_mem_new(cpu_env, 129 offsetof(CPUPPCState, so), "SO"); 130 cpu_ov = tcg_global_mem_new(cpu_env, 131 offsetof(CPUPPCState, ov), "OV"); 132 cpu_ca = tcg_global_mem_new(cpu_env, 133 offsetof(CPUPPCState, ca), "CA"); 134 cpu_ov32 = tcg_global_mem_new(cpu_env, 135 offsetof(CPUPPCState, ov32), "OV32"); 136 cpu_ca32 = tcg_global_mem_new(cpu_env, 137 offsetof(CPUPPCState, ca32), "CA32"); 138 139 cpu_reserve = tcg_global_mem_new(cpu_env, 140 offsetof(CPUPPCState, reserve_addr), 141 "reserve_addr"); 142 cpu_reserve_val = tcg_global_mem_new(cpu_env, 143 offsetof(CPUPPCState, reserve_val), 144 "reserve_val"); 145 146 cpu_fpscr = tcg_global_mem_new(cpu_env, 147 offsetof(CPUPPCState, fpscr), "fpscr"); 148 149 cpu_access_type = tcg_global_mem_new_i32(cpu_env, 150 offsetof(CPUPPCState, access_type), 151 "access_type"); 152 } 153 154 /* internal defines */ 155 struct DisasContext { 156 DisasContextBase base; 157 target_ulong cia; /* current instruction address */ 158 uint32_t opcode; 159 /* Routine used to access memory */ 160 bool pr, hv, dr, le_mode; 161 bool lazy_tlb_flush; 162 bool need_access_type; 163 int mem_idx; 164 int access_type; 165 /* Translation flags */ 166 MemOp default_tcg_memop_mask; 167 #if defined(TARGET_PPC64) 168 bool sf_mode; 169 bool has_cfar; 170 #endif 171 bool fpu_enabled; 172 bool altivec_enabled; 173 bool vsx_enabled; 174 bool spe_enabled; 175 bool tm_enabled; 176 bool 
gtse; 177 bool hr; 178 bool mmcr0_pmcc0; 179 bool mmcr0_pmcc1; 180 bool pmu_insn_cnt; 181 ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */ 182 int singlestep_enabled; 183 uint32_t flags; 184 uint64_t insns_flags; 185 uint64_t insns_flags2; 186 }; 187 188 #define DISAS_EXIT DISAS_TARGET_0 /* exit to main loop, pc updated */ 189 #define DISAS_EXIT_UPDATE DISAS_TARGET_1 /* exit to main loop, pc stale */ 190 #define DISAS_CHAIN DISAS_TARGET_2 /* lookup next tb, pc updated */ 191 #define DISAS_CHAIN_UPDATE DISAS_TARGET_3 /* lookup next tb, pc stale */ 192 193 /* Return true iff byteswap is needed in a scalar memop */ 194 static inline bool need_byteswap(const DisasContext *ctx) 195 { 196 #if defined(TARGET_WORDS_BIGENDIAN) 197 return ctx->le_mode; 198 #else 199 return !ctx->le_mode; 200 #endif 201 } 202 203 /* True when active word size < size of target_long. */ 204 #ifdef TARGET_PPC64 205 # define NARROW_MODE(C) (!(C)->sf_mode) 206 #else 207 # define NARROW_MODE(C) 0 208 #endif 209 210 struct opc_handler_t { 211 /* invalid bits for instruction 1 (Rc(opcode) == 0) */ 212 uint32_t inval1; 213 /* invalid bits for instruction 2 (Rc(opcode) == 1) */ 214 uint32_t inval2; 215 /* instruction type */ 216 uint64_t type; 217 /* extended instruction type */ 218 uint64_t type2; 219 /* handler */ 220 void (*handler)(DisasContext *ctx); 221 }; 222 223 /* SPR load/store helpers */ 224 static inline void gen_load_spr(TCGv t, int reg) 225 { 226 tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 227 } 228 229 static inline void gen_store_spr(int reg, TCGv t) 230 { 231 tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg])); 232 } 233 234 static inline void gen_set_access_type(DisasContext *ctx, int access_type) 235 { 236 if (ctx->need_access_type && ctx->access_type != access_type) { 237 tcg_gen_movi_i32(cpu_access_type, access_type); 238 ctx->access_type = access_type; 239 } 240 } 241 242 static inline void gen_update_nip(DisasContext *ctx, target_ulong nip) 
243 { 244 if (NARROW_MODE(ctx)) { 245 nip = (uint32_t)nip; 246 } 247 tcg_gen_movi_tl(cpu_nip, nip); 248 } 249 250 static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error) 251 { 252 TCGv_i32 t0, t1; 253 254 /* 255 * These are all synchronous exceptions, we set the PC back to the 256 * faulting instruction 257 */ 258 gen_update_nip(ctx, ctx->cia); 259 t0 = tcg_const_i32(excp); 260 t1 = tcg_const_i32(error); 261 gen_helper_raise_exception_err(cpu_env, t0, t1); 262 tcg_temp_free_i32(t0); 263 tcg_temp_free_i32(t1); 264 ctx->base.is_jmp = DISAS_NORETURN; 265 } 266 267 static void gen_exception(DisasContext *ctx, uint32_t excp) 268 { 269 TCGv_i32 t0; 270 271 /* 272 * These are all synchronous exceptions, we set the PC back to the 273 * faulting instruction 274 */ 275 gen_update_nip(ctx, ctx->cia); 276 t0 = tcg_const_i32(excp); 277 gen_helper_raise_exception(cpu_env, t0); 278 tcg_temp_free_i32(t0); 279 ctx->base.is_jmp = DISAS_NORETURN; 280 } 281 282 static void gen_exception_nip(DisasContext *ctx, uint32_t excp, 283 target_ulong nip) 284 { 285 TCGv_i32 t0; 286 287 gen_update_nip(ctx, nip); 288 t0 = tcg_const_i32(excp); 289 gen_helper_raise_exception(cpu_env, t0); 290 tcg_temp_free_i32(t0); 291 ctx->base.is_jmp = DISAS_NORETURN; 292 } 293 294 static void gen_icount_io_start(DisasContext *ctx) 295 { 296 if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) { 297 gen_io_start(); 298 /* 299 * An I/O instruction must be last in the TB. 300 * Chain to the next TB, and let the code from gen_tb_start 301 * decide if we need to return to the main loop. 302 * Doing this first also allows this value to be overridden. 303 */ 304 ctx->base.is_jmp = DISAS_TOO_MANY; 305 } 306 } 307 308 /* 309 * Tells the caller what is the appropriate exception to generate and prepares 310 * SPR registers for this exception. 311 * 312 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or 313 * POWERPC_EXCP_DEBUG (on BookE). 
314 */ 315 static uint32_t gen_prep_dbgex(DisasContext *ctx) 316 { 317 if (ctx->flags & POWERPC_FLAG_DE) { 318 target_ulong dbsr = 0; 319 if (ctx->singlestep_enabled & CPU_SINGLE_STEP) { 320 dbsr = DBCR0_ICMP; 321 } else { 322 /* Must have been branch */ 323 dbsr = DBCR0_BRT; 324 } 325 TCGv t0 = tcg_temp_new(); 326 gen_load_spr(t0, SPR_BOOKE_DBSR); 327 tcg_gen_ori_tl(t0, t0, dbsr); 328 gen_store_spr(SPR_BOOKE_DBSR, t0); 329 tcg_temp_free(t0); 330 return POWERPC_EXCP_DEBUG; 331 } else { 332 return POWERPC_EXCP_TRACE; 333 } 334 } 335 336 static void gen_debug_exception(DisasContext *ctx) 337 { 338 gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx))); 339 ctx->base.is_jmp = DISAS_NORETURN; 340 } 341 342 static inline void gen_inval_exception(DisasContext *ctx, uint32_t error) 343 { 344 /* Will be converted to program check if needed */ 345 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error); 346 } 347 348 static inline void gen_priv_exception(DisasContext *ctx, uint32_t error) 349 { 350 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error); 351 } 352 353 static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error) 354 { 355 /* Will be converted to program check if needed */ 356 gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error); 357 } 358 359 /*****************************************************************************/ 360 /* SPR READ/WRITE CALLBACKS */ 361 362 void spr_noaccess(DisasContext *ctx, int gprn, int sprn) 363 { 364 #if 0 365 sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5); 366 printf("ERROR: try to access SPR %d !\n", sprn); 367 #endif 368 } 369 370 /* #define PPC_DUMP_SPR_ACCESSES */ 371 372 /* 373 * Generic callbacks: 374 * do nothing but store/retrieve spr value 375 */ 376 static void spr_load_dump_spr(int sprn) 377 { 378 #ifdef PPC_DUMP_SPR_ACCESSES 379 TCGv_i32 t0 = tcg_const_i32(sprn); 380 gen_helper_load_dump_spr(cpu_env, t0); 381 
tcg_temp_free_i32(t0); 382 #endif 383 } 384 385 void spr_read_generic(DisasContext *ctx, int gprn, int sprn) 386 { 387 gen_load_spr(cpu_gpr[gprn], sprn); 388 spr_load_dump_spr(sprn); 389 } 390 391 static void spr_store_dump_spr(int sprn) 392 { 393 #ifdef PPC_DUMP_SPR_ACCESSES 394 TCGv_i32 t0 = tcg_const_i32(sprn); 395 gen_helper_store_dump_spr(cpu_env, t0); 396 tcg_temp_free_i32(t0); 397 #endif 398 } 399 400 void spr_write_generic(DisasContext *ctx, int sprn, int gprn) 401 { 402 gen_store_spr(sprn, cpu_gpr[gprn]); 403 spr_store_dump_spr(sprn); 404 } 405 406 void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn) 407 { 408 spr_write_generic(ctx, sprn, gprn); 409 410 /* 411 * SPR_CTRL writes must force a new translation block, 412 * allowing the PMU to calculate the run latch events with 413 * more accuracy. 414 */ 415 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 416 } 417 418 #if !defined(CONFIG_USER_ONLY) 419 void spr_write_generic32(DisasContext *ctx, int sprn, int gprn) 420 { 421 #ifdef TARGET_PPC64 422 TCGv t0 = tcg_temp_new(); 423 tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]); 424 gen_store_spr(sprn, t0); 425 tcg_temp_free(t0); 426 spr_store_dump_spr(sprn); 427 #else 428 spr_write_generic(ctx, sprn, gprn); 429 #endif 430 } 431 432 void spr_write_clear(DisasContext *ctx, int sprn, int gprn) 433 { 434 TCGv t0 = tcg_temp_new(); 435 TCGv t1 = tcg_temp_new(); 436 gen_load_spr(t0, sprn); 437 tcg_gen_neg_tl(t1, cpu_gpr[gprn]); 438 tcg_gen_and_tl(t0, t0, t1); 439 gen_store_spr(sprn, t0); 440 tcg_temp_free(t0); 441 tcg_temp_free(t1); 442 } 443 444 void spr_access_nop(DisasContext *ctx, int sprn, int gprn) 445 { 446 } 447 448 #endif 449 450 /* SPR common to all PowerPC */ 451 /* XER */ 452 void spr_read_xer(DisasContext *ctx, int gprn, int sprn) 453 { 454 TCGv dst = cpu_gpr[gprn]; 455 TCGv t0 = tcg_temp_new(); 456 TCGv t1 = tcg_temp_new(); 457 TCGv t2 = tcg_temp_new(); 458 tcg_gen_mov_tl(dst, cpu_xer); 459 tcg_gen_shli_tl(t0, cpu_so, XER_SO); 460 tcg_gen_shli_tl(t1, cpu_ov, XER_OV); 
461 tcg_gen_shli_tl(t2, cpu_ca, XER_CA); 462 tcg_gen_or_tl(t0, t0, t1); 463 tcg_gen_or_tl(dst, dst, t2); 464 tcg_gen_or_tl(dst, dst, t0); 465 if (is_isa300(ctx)) { 466 tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32); 467 tcg_gen_or_tl(dst, dst, t0); 468 tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32); 469 tcg_gen_or_tl(dst, dst, t0); 470 } 471 tcg_temp_free(t0); 472 tcg_temp_free(t1); 473 tcg_temp_free(t2); 474 } 475 476 void spr_write_xer(DisasContext *ctx, int sprn, int gprn) 477 { 478 TCGv src = cpu_gpr[gprn]; 479 /* Write all flags, while reading back check for isa300 */ 480 tcg_gen_andi_tl(cpu_xer, src, 481 ~((1u << XER_SO) | 482 (1u << XER_OV) | (1u << XER_OV32) | 483 (1u << XER_CA) | (1u << XER_CA32))); 484 tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1); 485 tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1); 486 tcg_gen_extract_tl(cpu_so, src, XER_SO, 1); 487 tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1); 488 tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1); 489 } 490 491 /* LR */ 492 void spr_read_lr(DisasContext *ctx, int gprn, int sprn) 493 { 494 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr); 495 } 496 497 void spr_write_lr(DisasContext *ctx, int sprn, int gprn) 498 { 499 tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]); 500 } 501 502 /* CFAR */ 503 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 504 void spr_read_cfar(DisasContext *ctx, int gprn, int sprn) 505 { 506 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar); 507 } 508 509 void spr_write_cfar(DisasContext *ctx, int sprn, int gprn) 510 { 511 tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]); 512 } 513 #endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */ 514 515 /* CTR */ 516 void spr_read_ctr(DisasContext *ctx, int gprn, int sprn) 517 { 518 tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr); 519 } 520 521 void spr_write_ctr(DisasContext *ctx, int sprn, int gprn) 522 { 523 tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]); 524 } 525 526 /* User read access to SPR */ 527 /* USPRx */ 528 /* UMMCRx */ 529 /* UPMCx */ 530 /* USIA */ 531 /* UDECR */ 532 void 
spr_read_ureg(DisasContext *ctx, int gprn, int sprn) 533 { 534 gen_load_spr(cpu_gpr[gprn], sprn + 0x10); 535 } 536 537 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 538 void spr_write_ureg(DisasContext *ctx, int sprn, int gprn) 539 { 540 gen_store_spr(sprn + 0x10, cpu_gpr[gprn]); 541 } 542 #endif 543 544 /* SPR common to all non-embedded PowerPC */ 545 /* DECR */ 546 #if !defined(CONFIG_USER_ONLY) 547 void spr_read_decr(DisasContext *ctx, int gprn, int sprn) 548 { 549 gen_icount_io_start(ctx); 550 gen_helper_load_decr(cpu_gpr[gprn], cpu_env); 551 } 552 553 void spr_write_decr(DisasContext *ctx, int sprn, int gprn) 554 { 555 gen_icount_io_start(ctx); 556 gen_helper_store_decr(cpu_env, cpu_gpr[gprn]); 557 } 558 #endif 559 560 /* SPR common to all non-embedded PowerPC, except 601 */ 561 /* Time base */ 562 void spr_read_tbl(DisasContext *ctx, int gprn, int sprn) 563 { 564 gen_icount_io_start(ctx); 565 gen_helper_load_tbl(cpu_gpr[gprn], cpu_env); 566 } 567 568 void spr_read_tbu(DisasContext *ctx, int gprn, int sprn) 569 { 570 gen_icount_io_start(ctx); 571 gen_helper_load_tbu(cpu_gpr[gprn], cpu_env); 572 } 573 574 void spr_read_atbl(DisasContext *ctx, int gprn, int sprn) 575 { 576 gen_helper_load_atbl(cpu_gpr[gprn], cpu_env); 577 } 578 579 void spr_read_atbu(DisasContext *ctx, int gprn, int sprn) 580 { 581 gen_helper_load_atbu(cpu_gpr[gprn], cpu_env); 582 } 583 584 #if !defined(CONFIG_USER_ONLY) 585 void spr_write_tbl(DisasContext *ctx, int sprn, int gprn) 586 { 587 gen_icount_io_start(ctx); 588 gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]); 589 } 590 591 void spr_write_tbu(DisasContext *ctx, int sprn, int gprn) 592 { 593 gen_icount_io_start(ctx); 594 gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]); 595 } 596 597 void spr_write_atbl(DisasContext *ctx, int sprn, int gprn) 598 { 599 gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]); 600 } 601 602 void spr_write_atbu(DisasContext *ctx, int sprn, int gprn) 603 { 604 gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]); 605 } 606 
607 #if defined(TARGET_PPC64) 608 void spr_read_purr(DisasContext *ctx, int gprn, int sprn) 609 { 610 gen_icount_io_start(ctx); 611 gen_helper_load_purr(cpu_gpr[gprn], cpu_env); 612 } 613 614 void spr_write_purr(DisasContext *ctx, int sprn, int gprn) 615 { 616 gen_icount_io_start(ctx); 617 gen_helper_store_purr(cpu_env, cpu_gpr[gprn]); 618 } 619 620 /* HDECR */ 621 void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn) 622 { 623 gen_icount_io_start(ctx); 624 gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env); 625 } 626 627 void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn) 628 { 629 gen_icount_io_start(ctx); 630 gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]); 631 } 632 633 void spr_read_vtb(DisasContext *ctx, int gprn, int sprn) 634 { 635 gen_icount_io_start(ctx); 636 gen_helper_load_vtb(cpu_gpr[gprn], cpu_env); 637 } 638 639 void spr_write_vtb(DisasContext *ctx, int sprn, int gprn) 640 { 641 gen_icount_io_start(ctx); 642 gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]); 643 } 644 645 void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn) 646 { 647 gen_icount_io_start(ctx); 648 gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]); 649 } 650 651 #endif 652 #endif 653 654 #if !defined(CONFIG_USER_ONLY) 655 /* IBAT0U...IBAT0U */ 656 /* IBAT0L...IBAT7L */ 657 void spr_read_ibat(DisasContext *ctx, int gprn, int sprn) 658 { 659 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 660 offsetof(CPUPPCState, 661 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2])); 662 } 663 664 void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn) 665 { 666 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 667 offsetof(CPUPPCState, 668 IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4])); 669 } 670 671 void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn) 672 { 673 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 674 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]); 675 tcg_temp_free_i32(t0); 676 } 677 678 void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn) 679 { 680 TCGv_i32 t0 = 
tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4); 681 gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]); 682 tcg_temp_free_i32(t0); 683 } 684 685 void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn) 686 { 687 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2); 688 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]); 689 tcg_temp_free_i32(t0); 690 } 691 692 void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn) 693 { 694 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4); 695 gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]); 696 tcg_temp_free_i32(t0); 697 } 698 699 /* DBAT0U...DBAT7U */ 700 /* DBAT0L...DBAT7L */ 701 void spr_read_dbat(DisasContext *ctx, int gprn, int sprn) 702 { 703 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 704 offsetof(CPUPPCState, 705 DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2])); 706 } 707 708 void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn) 709 { 710 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 711 offsetof(CPUPPCState, 712 DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4])); 713 } 714 715 void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn) 716 { 717 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2); 718 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 719 tcg_temp_free_i32(t0); 720 } 721 722 void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn) 723 { 724 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4); 725 gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]); 726 tcg_temp_free_i32(t0); 727 } 728 729 void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn) 730 { 731 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2); 732 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 733 tcg_temp_free_i32(t0); 734 } 735 736 void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn) 737 { 738 TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4); 739 gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]); 740 tcg_temp_free_i32(t0); 741 } 742 743 /* SDR1 */ 744 void spr_write_sdr1(DisasContext *ctx, int 
sprn, int gprn) 745 { 746 gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]); 747 } 748 749 #if defined(TARGET_PPC64) 750 /* 64 bits PowerPC specific SPRs */ 751 /* PIDR */ 752 void spr_write_pidr(DisasContext *ctx, int sprn, int gprn) 753 { 754 gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]); 755 } 756 757 void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn) 758 { 759 gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]); 760 } 761 762 void spr_read_hior(DisasContext *ctx, int gprn, int sprn) 763 { 764 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix)); 765 } 766 767 void spr_write_hior(DisasContext *ctx, int sprn, int gprn) 768 { 769 TCGv t0 = tcg_temp_new(); 770 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL); 771 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 772 tcg_temp_free(t0); 773 } 774 void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn) 775 { 776 gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]); 777 } 778 779 void spr_write_pcr(DisasContext *ctx, int sprn, int gprn) 780 { 781 gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]); 782 } 783 784 /* DPDES */ 785 void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn) 786 { 787 gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env); 788 } 789 790 void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn) 791 { 792 gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]); 793 } 794 #endif 795 #endif 796 797 /* PowerPC 601 specific registers */ 798 /* RTC */ 799 void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn) 800 { 801 gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env); 802 } 803 804 void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn) 805 { 806 gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env); 807 } 808 809 #if !defined(CONFIG_USER_ONLY) 810 void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn) 811 { 812 gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]); 813 } 814 815 void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn) 816 { 817 
gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]); 818 } 819 820 void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn) 821 { 822 gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]); 823 /* Must stop the translation as endianness may have changed */ 824 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 825 } 826 #endif 827 828 /* Unified bats */ 829 #if !defined(CONFIG_USER_ONLY) 830 void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn) 831 { 832 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 833 offsetof(CPUPPCState, 834 IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2])); 835 } 836 837 void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn) 838 { 839 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 840 gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]); 841 tcg_temp_free_i32(t0); 842 } 843 844 void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn) 845 { 846 TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2); 847 gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]); 848 tcg_temp_free_i32(t0); 849 } 850 #endif 851 852 /* PowerPC 40x specific registers */ 853 #if !defined(CONFIG_USER_ONLY) 854 void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn) 855 { 856 gen_icount_io_start(ctx); 857 gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env); 858 } 859 860 void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn) 861 { 862 gen_icount_io_start(ctx); 863 gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]); 864 } 865 866 void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn) 867 { 868 gen_icount_io_start(ctx); 869 gen_store_spr(sprn, cpu_gpr[gprn]); 870 gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]); 871 /* We must stop translation as we may have rebooted */ 872 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 873 } 874 875 void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn) 876 { 877 gen_icount_io_start(ctx); 878 gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]); 879 } 880 881 void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn) 882 { 
883 gen_icount_io_start(ctx); 884 gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]); 885 } 886 887 void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn) 888 { 889 gen_icount_io_start(ctx); 890 gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]); 891 } 892 893 void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn) 894 { 895 TCGv t0 = tcg_temp_new(); 896 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF); 897 gen_store_spr(SPR_40x_PID, t0); 898 tcg_temp_free(t0); 899 } 900 901 void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn) 902 { 903 gen_icount_io_start(ctx); 904 gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]); 905 } 906 907 void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn) 908 { 909 gen_icount_io_start(ctx); 910 gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]); 911 } 912 #endif 913 914 /* PowerPC 403 specific registers */ 915 /* PBL1 / PBU1 / PBL2 / PBU2 */ 916 #if !defined(CONFIG_USER_ONLY) 917 void spr_read_403_pbr(DisasContext *ctx, int gprn, int sprn) 918 { 919 tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, 920 offsetof(CPUPPCState, pb[sprn - SPR_403_PBL1])); 921 } 922 923 void spr_write_403_pbr(DisasContext *ctx, int sprn, int gprn) 924 { 925 TCGv_i32 t0 = tcg_const_i32(sprn - SPR_403_PBL1); 926 gen_helper_store_403_pbr(cpu_env, t0, cpu_gpr[gprn]); 927 tcg_temp_free_i32(t0); 928 } 929 930 void spr_write_pir(DisasContext *ctx, int sprn, int gprn) 931 { 932 TCGv t0 = tcg_temp_new(); 933 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF); 934 gen_store_spr(SPR_PIR, t0); 935 tcg_temp_free(t0); 936 } 937 #endif 938 939 /* SPE specific registers */ 940 void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn) 941 { 942 TCGv_i32 t0 = tcg_temp_new_i32(); 943 tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 944 tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0); 945 tcg_temp_free_i32(t0); 946 } 947 948 void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn) 949 { 950 TCGv_i32 t0 = tcg_temp_new_i32(); 951 tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]); 
952 tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr)); 953 tcg_temp_free_i32(t0); 954 } 955 956 #if !defined(CONFIG_USER_ONLY) 957 /* Callback used to write the exception vector base */ 958 void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn) 959 { 960 TCGv t0 = tcg_temp_new(); 961 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask)); 962 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 963 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix)); 964 gen_store_spr(sprn, t0); 965 tcg_temp_free(t0); 966 } 967 968 void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn) 969 { 970 int sprn_offs; 971 972 if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) { 973 sprn_offs = sprn - SPR_BOOKE_IVOR0; 974 } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) { 975 sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32; 976 } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) { 977 sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38; 978 } else { 979 printf("Trying to write an unknown exception vector %d %03x\n", 980 sprn, sprn); 981 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 982 return; 983 } 984 985 TCGv t0 = tcg_temp_new(); 986 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask)); 987 tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]); 988 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs])); 989 gen_store_spr(sprn, t0); 990 tcg_temp_free(t0); 991 } 992 #endif 993 994 #ifdef TARGET_PPC64 995 #ifndef CONFIG_USER_ONLY 996 void spr_write_amr(DisasContext *ctx, int sprn, int gprn) 997 { 998 TCGv t0 = tcg_temp_new(); 999 TCGv t1 = tcg_temp_new(); 1000 TCGv t2 = tcg_temp_new(); 1001 1002 /* 1003 * Note, the HV=1 PR=0 case is handled earlier by simply using 1004 * spr_write_generic for HV mode in the SPR table 1005 */ 1006 1007 /* Build insertion mask into t1 based on context */ 1008 if (ctx->pr) { 1009 gen_load_spr(t1, SPR_UAMOR); 1010 } else { 1011 gen_load_spr(t1, SPR_AMOR); 1012 } 1013 1014 /* Mask new bits into 
t2 */ 1015 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1016 1017 /* Load AMR and clear new bits in t0 */ 1018 gen_load_spr(t0, SPR_AMR); 1019 tcg_gen_andc_tl(t0, t0, t1); 1020 1021 /* Or'in new bits and write it out */ 1022 tcg_gen_or_tl(t0, t0, t2); 1023 gen_store_spr(SPR_AMR, t0); 1024 spr_store_dump_spr(SPR_AMR); 1025 1026 tcg_temp_free(t0); 1027 tcg_temp_free(t1); 1028 tcg_temp_free(t2); 1029 } 1030 1031 void spr_write_uamor(DisasContext *ctx, int sprn, int gprn) 1032 { 1033 TCGv t0 = tcg_temp_new(); 1034 TCGv t1 = tcg_temp_new(); 1035 TCGv t2 = tcg_temp_new(); 1036 1037 /* 1038 * Note, the HV=1 case is handled earlier by simply using 1039 * spr_write_generic for HV mode in the SPR table 1040 */ 1041 1042 /* Build insertion mask into t1 based on context */ 1043 gen_load_spr(t1, SPR_AMOR); 1044 1045 /* Mask new bits into t2 */ 1046 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1047 1048 /* Load AMR and clear new bits in t0 */ 1049 gen_load_spr(t0, SPR_UAMOR); 1050 tcg_gen_andc_tl(t0, t0, t1); 1051 1052 /* Or'in new bits and write it out */ 1053 tcg_gen_or_tl(t0, t0, t2); 1054 gen_store_spr(SPR_UAMOR, t0); 1055 spr_store_dump_spr(SPR_UAMOR); 1056 1057 tcg_temp_free(t0); 1058 tcg_temp_free(t1); 1059 tcg_temp_free(t2); 1060 } 1061 1062 void spr_write_iamr(DisasContext *ctx, int sprn, int gprn) 1063 { 1064 TCGv t0 = tcg_temp_new(); 1065 TCGv t1 = tcg_temp_new(); 1066 TCGv t2 = tcg_temp_new(); 1067 1068 /* 1069 * Note, the HV=1 case is handled earlier by simply using 1070 * spr_write_generic for HV mode in the SPR table 1071 */ 1072 1073 /* Build insertion mask into t1 based on context */ 1074 gen_load_spr(t1, SPR_AMOR); 1075 1076 /* Mask new bits into t2 */ 1077 tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]); 1078 1079 /* Load AMR and clear new bits in t0 */ 1080 gen_load_spr(t0, SPR_IAMR); 1081 tcg_gen_andc_tl(t0, t0, t1); 1082 1083 /* Or'in new bits and write it out */ 1084 tcg_gen_or_tl(t0, t0, t2); 1085 gen_store_spr(SPR_IAMR, t0); 1086 spr_store_dump_spr(SPR_IAMR); 1087 1088 
tcg_temp_free(t0); 1089 tcg_temp_free(t1); 1090 tcg_temp_free(t2); 1091 } 1092 #endif 1093 #endif 1094 1095 #ifndef CONFIG_USER_ONLY 1096 void spr_read_thrm(DisasContext *ctx, int gprn, int sprn) 1097 { 1098 gen_helper_fixup_thrm(cpu_env); 1099 gen_load_spr(cpu_gpr[gprn], sprn); 1100 spr_load_dump_spr(sprn); 1101 } 1102 #endif /* !CONFIG_USER_ONLY */ 1103 1104 #if !defined(CONFIG_USER_ONLY) 1105 void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn) 1106 { 1107 TCGv t0 = tcg_temp_new(); 1108 1109 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE); 1110 gen_store_spr(sprn, t0); 1111 tcg_temp_free(t0); 1112 } 1113 1114 void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn) 1115 { 1116 TCGv t0 = tcg_temp_new(); 1117 1118 tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE); 1119 gen_store_spr(sprn, t0); 1120 tcg_temp_free(t0); 1121 } 1122 1123 void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn) 1124 { 1125 TCGv t0 = tcg_temp_new(); 1126 1127 tcg_gen_andi_tl(t0, cpu_gpr[gprn], 1128 ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC)); 1129 gen_store_spr(sprn, t0); 1130 tcg_temp_free(t0); 1131 } 1132 1133 void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn) 1134 { 1135 gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]); 1136 } 1137 1138 void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn) 1139 { 1140 TCGv_i32 t0 = tcg_const_i32(sprn); 1141 gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]); 1142 tcg_temp_free_i32(t0); 1143 } 1144 void spr_write_eplc(DisasContext *ctx, int sprn, int gprn) 1145 { 1146 gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]); 1147 } 1148 void spr_write_epsc(DisasContext *ctx, int sprn, int gprn) 1149 { 1150 gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]); 1151 } 1152 1153 #endif 1154 1155 #if !defined(CONFIG_USER_ONLY) 1156 void spr_write_mas73(DisasContext *ctx, int sprn, int gprn) 1157 { 1158 TCGv val = tcg_temp_new(); 1159 tcg_gen_ext32u_tl(val, 
cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}

/* MAS7_MAS3 read: reassemble MAS7 (high 32 bits) and MAS3 (low 32 bits). */
void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
{
    TCGv mas7 = tcg_temp_new();
    TCGv mas3 = tcg_temp_new();
    gen_load_spr(mas7, SPR_BOOKE_MAS7);
    tcg_gen_shli_tl(mas7, mas7, 32);
    gen_load_spr(mas3, SPR_BOOKE_MAS3);
    tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
    tcg_temp_free(mas3);
    tcg_temp_free(mas7);
}

#endif

#ifdef TARGET_PPC64
/* Raise a facility-unavailable interrupt if the FSCR bit is clear. */
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

/* Raise a facility-unavailable interrupt if the MSR bit is clear. */
static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_msr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

/* Read the upper 32 bits of the SPR numbered one below sprn. */
void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
{
    TCGv spr_up = tcg_temp_new();
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_shri_tl(spr_up, spr, 32);
    tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);

    tcg_temp_free(spr);
    tcg_temp_free(spr_up);
}

/* Write the upper 32 bits of the SPR numbered one below sprn. */
void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
    gen_store_spr(sprn - 1, spr);

    tcg_temp_free(spr);
}

#if !defined(CONFIG_USER_ONLY)
/* HMER: writes can only clear bits (new value is ANDed with the old). */
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
    tcg_temp_free(hmer);
}

/* LPCR writes go through a helper so dependent state is kept consistent. */
void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
#endif /* !defined(CONFIG_USER_ONLY) */

/* TAR access is gated by the FSCR TAR facility bit. */
void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

/* TM SPR access is gated by the MSR TM facility bit. */
void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

/* EBB SPR access is gated by the FSCR EBB facility bit. */
void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
#endif

/* Convenience wrappers around the GEN_OPCODE* table-entry builders. */
#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type)                      \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2)             \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type)               \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2)      \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2)     \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

/* One entry of the opcode dispatch table. */
typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv.
check */
#define GEN_PRIV                                                \
    do {                                                        \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
/* Hypervisor-only instruction: problem state or non-HV traps. */
#define CHK_HV                                                  \
    do {                                                        \
        if (unlikely(ctx->pr || !ctx->hv)) {                    \
            GEN_PRIV;                                           \
        }                                                       \
    } while (0)
/* Supervisor-only instruction: problem state traps. */
#define CHK_SV                                                  \
    do {                                                        \
        if (unlikely(ctx->pr)) {                                \
            GEN_PRIV;                                           \
        }                                                       \
    } while (0)
/* Hypervisor real-mode only: also traps when data relocation is on. */
#define CHK_HVRM                                                \
    do {                                                        \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) {         \
            GEN_PRIV;                                           \
        }                                                       \
    } while (0)
#endif

#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table */

#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2)                    \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
/* Variant with two invalid-bits masks (dual-form instructions). */
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2)       \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl1,                                                     \
        .inval2  = invl2,                                                     \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
/* Variant with an explicit opcode name string. */
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2)             \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = 0xff,                                                             \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}
/* Variant with a fourth opcode field. */
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2)              \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = stringify(name),                                                 \
}
/* Variant with a fourth opcode field and an explicit name string. */
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2)        \
{                                                                             \
    .opc1 = op1,                                                              \
    .opc2 = op2,                                                              \
    .opc3 = op3,                                                              \
    .opc4 = op4,                                                              \
    .handler = {                                                              \
        .inval1  = invl,                                                      \
        .type = _typ,                                                         \
        .type2 = _typ2,                                                       \
        .handler = &gen_##name,                                               \
    },                                                                        \
    .oname = onam,                                                            \
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* Fallback handler installed for all undefined opcode slots. */
static opc_handler_t invalid_handler = {
    .inval1  = 0xFFFFFFFF,
    .inval2  = 0xFFFFFFFF,
    .type    = PPC_NONE,
    .type2   = PPC_NONE,
    .handler = gen_invalid,
};

/*** Integer comparison ***/

/*
 * Compare arg0 with arg1 (signed when s != 0) and set CR field crf to
 * LT/GT/EQ, ORed with the current SO bit.
 */
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ?
TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

/* Compare a register against an immediate. */
static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

/* 32-bit comparison: operands are extended (sign or zero) before comparing. */
static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

/* Set CR0 from a signed compare of reg against zero (Rc=1 semantics). */
static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    /* Low byte of src1 is the value to test; src2 packs the range bounds. */
    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    /* With L=1, also test against the second (upper) range in src2. */
    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    /* Select rA (or zero when rA==0) if CR bit bi is set, else rB. */
    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ?
non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);         /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);             /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    /* Write the result last so ret may alias arg1/arg2. */
    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov)     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],      \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca,                    \
                                add_ca, compute_ca, compute_ov)               \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    TCGv t0 = tcg_const_tl(const_val);                                        \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)],                           \
                     cpu_gpr[rA(ctx->opcode)], t0,                            \
                     ca, glue(ca, 32),                                        \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode));        \
    tcg_temp_free(t0);                                                        \
}

/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex: carries through OV instead of CA */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze addze. addzeo addzeo.*/
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addic addic.*/
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

/*
 * 32-bit divide. The divisor is forced to a safe value when the result
 * would be undefined (divide by zero, or INT_MIN / -1 when signed); t2
 * then records the "invalid divide" condition for OV.
 */
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3,
*/
/* Extended divide: implemented via a helper that also computes OV. */
#define GEN_DIVE(name, hlpr, compute_ov)                                      \
static void gen_##name(DisasContext *ctx)                                     \
{                                                                             \
    TCGv_i32 t0 = tcg_const_i32(compute_ov);                                  \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env,                      \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0);                                                    \
    if (unlikely(Rc(ctx->opcode) != 0)) {                                     \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);                           \
    }                                                                         \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
/*
 * 64-bit divide. Mirrors gen_op_arith_divw: the divisor is forced to a
 * safe value for undefined cases (divide by zero, INT64_MIN / -1) and
 * t2 records the invalid-divide condition for OV.
 */
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign, compute_ov);                                      \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

/*
 * 32-bit modulo. Undefined cases (mod by zero, INT_MIN mod -1 when
 * signed) are redirected to a safe divisor; no OV is computed for mod.
 */
static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
/* 64-bit modulo, mirroring gen_op_arith_modw. */
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign)                                  \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)],                          \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],     \
                      sign);                                                  \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw.
*/
/* High 32 bits of the signed 32x32 product. */
static void gen_mulhw(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhwu mulhwu.  High 32 bits of the unsigned 32x32 product. */
static void gen_mulhwu(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mulu2_i32(t0, t1, t0, t1);
    tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mullw mullw.  Low 32 bits of the product (sign-extended on ppc64). */
static void gen_mullw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    TCGv_i64 t0, t1;
    t0 = tcg_temp_new_i64();
    t1 = tcg_temp_new_i64();
    tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
#else
    tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
#endif
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mullwo mullwo.
*/
/* mullw with OV: overflow when the high half is not the sign extension. */
static void gen_mullwo(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_muls2_i32(t0, t1, t0, t1);
#if defined(TARGET_PPC64)
    tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1);
#else
    tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0);
#endif

    tcg_gen_sari_i32(t0, t0, 31);
    tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1);
    tcg_gen_extu_i32_tl(cpu_ov, t0);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulli */
static void gen_mulli(DisasContext *ctx)
{
    tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    SIMM(ctx->opcode));
}

#if defined(TARGET_PPC64)
/* mulhd mulhd.  High 64 bits of the signed 64x64 product. */
static void gen_mulhd(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulhdu mulhdu.  High 64 bits of the unsigned 64x64 product. */
static void gen_mulhdu(DisasContext *ctx)
{
    TCGv lo = tcg_temp_new();
    tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)],
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(lo);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulld mulld.
*/
static void gen_mulld(DisasContext *ctx)
{
    tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                   cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulldo mulldo.  mulld with OV from the full 128-bit product. */
static void gen_mulldo(DisasContext *ctx)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0);

    /* Overflow iff the high half differs from the low half's sign bits. */
    tcg_gen_sari_i64(t0, t0, 63);
    tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1);
    if (is_isa300(ctx)) {
        tcg_gen_mov_tl(cpu_ov32, cpu_ov);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}
#endif

/* Common subf function */
static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, bool add_ca, bool compute_ca,
                                     bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        /* dest = ~arg1 + arg2 [+ ca].  */
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
2170 */ 2171 TCGv inv1 = tcg_temp_new(); 2172 TCGv t1 = tcg_temp_new(); 2173 tcg_gen_not_tl(inv1, arg1); 2174 if (add_ca) { 2175 tcg_gen_add_tl(t0, arg2, cpu_ca); 2176 } else { 2177 tcg_gen_addi_tl(t0, arg2, 1); 2178 } 2179 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 2180 tcg_gen_add_tl(t0, t0, inv1); 2181 tcg_temp_free(inv1); 2182 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 2183 tcg_temp_free(t1); 2184 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 2185 if (is_isa300(ctx)) { 2186 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2187 } 2188 } else if (add_ca) { 2189 TCGv zero, inv1 = tcg_temp_new(); 2190 tcg_gen_not_tl(inv1, arg1); 2191 zero = tcg_const_tl(0); 2192 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 2193 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 2194 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 2195 tcg_temp_free(zero); 2196 tcg_temp_free(inv1); 2197 } else { 2198 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 2199 tcg_gen_sub_tl(t0, arg2, arg1); 2200 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 2201 } 2202 } else if (add_ca) { 2203 /* 2204 * Since we're ignoring carry-out, we can simplify the 2205 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
2206 */ 2207 tcg_gen_sub_tl(t0, arg2, arg1); 2208 tcg_gen_add_tl(t0, t0, cpu_ca); 2209 tcg_gen_subi_tl(t0, t0, 1); 2210 } else { 2211 tcg_gen_sub_tl(t0, arg2, arg1); 2212 } 2213 2214 if (compute_ov) { 2215 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 2216 } 2217 if (unlikely(compute_rc0)) { 2218 gen_set_Rc0(ctx, t0); 2219 } 2220 2221 if (t0 != ret) { 2222 tcg_gen_mov_tl(ret, t0); 2223 tcg_temp_free(t0); 2224 } 2225 } 2226 /* Sub functions with Two operands functions */ 2227 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 2228 static void glue(gen_, name)(DisasContext *ctx) \ 2229 { \ 2230 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2231 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2232 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2233 } 2234 /* Sub functions with one operand and one immediate */ 2235 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 2236 add_ca, compute_ca, compute_ov) \ 2237 static void glue(gen_, name)(DisasContext *ctx) \ 2238 { \ 2239 TCGv t0 = tcg_const_tl(const_val); \ 2240 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2241 cpu_gpr[rA(ctx->opcode)], t0, \ 2242 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2243 tcg_temp_free(t0); \ 2244 } 2245 /* subf subf. subfo subfo. */ 2246 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 2247 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 2248 /* subfc subfc. subfco subfco. */ 2249 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 2250 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 2251 /* subfe subfe. subfeo subfo. */ 2252 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 2253 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 2254 /* subfme subfme. subfmeo subfmeo. */ 2255 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 2256 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 2257 /* subfze subfze. 
subfzeo subfzeo.*/
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

/* subfic: rD = SIMM - rA.  Always computes CA, never OV or CR0. */
static void gen_subfic(DisasContext *ctx)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      c, 0, 1, 0, 0);
    tcg_temp_free(c);
}

/* neg neg. nego nego. */
/* Negation as subtraction from zero, via the common subf expander. */
static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov)
{
    TCGv zero = tcg_const_tl(0);
    gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      zero, 0, 0, compute_ov, Rc(ctx->opcode));
    tcg_temp_free(zero);
}

/* neg without OV: a plain TCG negate is enough. */
static void gen_neg(DisasContext *ctx)
{
    tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode))) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* nego: needs OV tracking, so go through the subf expander. */
static void gen_nego(DisasContext *ctx)
{
    gen_op_arith_neg(ctx, 1);
}

/*** Integer logical ***/
/* Two-source logical op: rA = rS <tcg_op> rB, optionally updating CR0. */
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],                \
           cpu_gpr[rB(ctx->opcode)]);                                         \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* One-source logical op: rA = <tcg_op>(rS), optionally updating CR0. */
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);               \
    if (unlikely(Rc(ctx->opcode) != 0))                                       \
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);                           \
}

/* and & and. */
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER);
/* andc & andc. */
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER);

/* andi.
*/
static void gen_andi_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode));
    /* andi. always sets CR0 (the record bit is implicit in the opcode) */
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* andis. */
static void gen_andis_(DisasContext *ctx)
{
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    UIMM(ctx->opcode) << 16);
    gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
}

/* cntlzw */
static void gen_cntlzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    /* clzi's third operand (32) is the result for an all-zero input */
    tcg_gen_clzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* cnttzw */
static void gen_cnttzw(DisasContext *ctx)
{
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]);
    /* ctzi's third operand (32) is the result for an all-zero input */
    tcg_gen_ctzi_i32(t, t, 32);
    tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t);
    tcg_temp_free_i32(t);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* eqv & eqv. */
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER);
/* extsb & extsb. */
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER);
/* extsh & extsh. */
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER);
/* nand & nand. */
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER);
/* nor & nor.
*/
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER);

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
/*
 * Exit the TB with EXCP_HLT after clearing cpu->halted, used by the
 * "or rx,rx,rx" priority hints below so sibling threads get to run.
 */
static void gen_pause(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(0);
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);

    /* Stop translation, this gives other CPUs a chance to run */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}
#endif /* defined(TARGET_PPC64) */

/* or & or. */
static void gen_or(DisasContext *ctx)
{
    int rs, ra, rb;

    rs = rS(ctx->opcode);
    ra = rA(ctx->opcode);
    rb = rB(ctx->opcode);
    /* Optimisation for mr. ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb) {
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        } else {
            /* or rx,ry,ry is a register move */
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        }
        if (unlikely(Rc(ctx->opcode) != 0)) {
            gen_set_Rc0(ctx, cpu_gpr[ra]);
        }
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        /*
         * "or rx,rx,rx" encodes a thread-priority hint; map rx to the
         * PPR priority value (prio == 0 leaves PPR unchanged).
         */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            /* Replace the 3-bit priority field at PPR bits 52:50 */
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /*
         * Pause out of TCG otherwise spin loops with smt_low eat too
         * much CPU and the kernel hangs. This applies to all
         * encodings other than no-op, e.g., miso(rs=26), yield(27),
         * mdoio(29), mdoom(30), and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor. */
static void gen_xor(DisasContext *ctx)
{
    /* Optimisation for "set to zero" case */
    if (rS(ctx->opcode) != rB(ctx->opcode)) {
        tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                       cpu_gpr[rB(ctx->opcode)]);
    } else {
        tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* ori */
static void gen_ori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    /* ori r,r,0 is the canonical no-op: emit nothing */
    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* oris */
static void gen_oris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                   uimm << 16);
}

/* xori */
static void gen_xori(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm);
}

/* xoris */
static void gen_xoris(DisasContext *ctx)
{
    target_ulong uimm = UIMM(ctx->opcode);

    if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) {
        /* NOP */
        return;
    }
    tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    uimm << 16);
}

/* popcntb : PowerPC 2.03 specification */
static void gen_popcntb(DisasContext *ctx)
{
    gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

/* popcntw: per-word popcount on 64-bit, plain popcount on 32-bit */
static void gen_popcntw(DisasContext *ctx)
{
#if defined(TARGET_PPC64)
    gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#else
    tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
#endif
}

#if defined(TARGET_PPC64)
/* popcntd: PowerPC 2.06 specification */
static void gen_popcntd(DisasContext *ctx)
{
    tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}
#endif

/* prtyw: PowerPC 2.05 specification */
/* XOR-fold each word's bytes down to bit 0 of the word. */
static void gen_prtyw(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 16);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    /* keep bit 0 of each 32-bit word */
    tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL);
    tcg_temp_free(t0);
}

#if defined(TARGET_PPC64)
/* prtyd: PowerPC 2.05 specification */
/* XOR-fold all eight bytes of the doubleword down to bit 0. */
static void gen_prtyd(DisasContext *ctx)
{
    TCGv ra = cpu_gpr[rA(ctx->opcode)];
    TCGv rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t0 = tcg_temp_new();
    tcg_gen_shri_tl(t0, rs, 32);
    tcg_gen_xor_tl(ra, rs, t0);
    tcg_gen_shri_tl(t0, ra, 16);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_shri_tl(t0, ra, 8);
    tcg_gen_xor_tl(ra, ra, t0);
    tcg_gen_andi_tl(ra, ra, 1);
    tcg_temp_free(t0);
}
#endif

#if defined(TARGET_PPC64)
/* bpermd */
static void gen_bpermd(DisasContext *ctx)
{
    gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
}
#endif

#if defined(TARGET_PPC64)
/* extsw & extsw. */
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B);

/* cntlzd */
static void gen_cntlzd(DisasContext *ctx)
{
    tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* cnttzd */
static void gen_cnttzd(DisasContext *ctx)
{
    tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* darn */
/* L selects the form; L > 2 is invalid and returns all-ones. */
static void gen_darn(DisasContext *ctx)
{
    int l = L(ctx->opcode);

    if (l > 2) {
        tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1);
    } else {
        /* random number generation counts as I/O for icount purposes */
        gen_icount_io_start(ctx);
        if (l == 0) {
            gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]);
        } else {
            /* Return 64-bit random for both CRN and RRN */
            gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]);
        }
    }
}
#endif

/*** Integer rotate ***/

/* rlwimi & rlwimi.
*/
/* Rotate-left-word then insert under mask mb..me into rA. */
static void gen_rlwimi(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode);
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);

    if (sh == (31 - me) && mb <= me) {
        /* The mask selects exactly the shifted-in field: plain deposit */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask;
        bool mask_in_32b = true;
        TCGv t1;

#if defined(TARGET_PPC64)
        /* MASK() takes 64-bit bit numbers; word masks live in bits 32..63 */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);

#if defined(TARGET_PPC64)
        if (mask > 0xffffffffu) {
            /* wrapped (mb > me) mask: cannot be modelled as a 32-bit rotate */
            mask_in_32b = false;
        }
#endif
        t1 = tcg_temp_new();
        if (mask_in_32b) {
            TCGv_i32 t0 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t0, t_rs);
            tcg_gen_rotli_i32(t0, t0, sh);
            tcg_gen_extu_i32_tl(t1, t0);
            tcg_temp_free_i32(t0);
        } else {
#if defined(TARGET_PPC64)
            /* duplicate the low word so a 64-bit rotate matches rotlw */
            tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t1, t1, sh);
#else
            g_assert_not_reached();
#endif
        }

        /* merge rotated source into rA under the mask */
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwinm & rlwinm.
*/
/* Rotate-left-word immediate then AND with mask mb..me. */
static void gen_rlwinm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int sh = SH(ctx->opcode);
    int mb = MB(ctx->opcode);
    int me = ME(ctx->opcode);
    int len = me - mb + 1;
    int rsh = (32 - sh) & 31;

    if (sh != 0 && len > 0 && me == (31 - sh)) {
        /* shift-left-and-clear: zero-extending deposit */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 31 && rsh + len <= 32) {
        /* right-justified field: plain extract */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        target_ulong mask;
        bool mask_in_32b = true;
#if defined(TARGET_PPC64)
        /* MASK() takes 64-bit bit numbers; word masks live in bits 32..63 */
        mb += 32;
        me += 32;
#endif
        mask = MASK(mb, me);
#if defined(TARGET_PPC64)
        if (mask > 0xffffffffu) {
            /* wrapped (mb > me) mask: needs the 64-bit path below */
            mask_in_32b = false;
        }
#endif
        if (mask_in_32b) {
            if (sh == 0) {
                tcg_gen_andi_tl(t_ra, t_rs, mask);
            } else {
                TCGv_i32 t0 = tcg_temp_new_i32();
                tcg_gen_trunc_tl_i32(t0, t_rs);
                tcg_gen_rotli_i32(t0, t0, sh);
                tcg_gen_andi_i32(t0, t0, mask);
                tcg_gen_extu_i32_tl(t_ra, t0);
                tcg_temp_free_i32(t0);
            }
        } else {
#if defined(TARGET_PPC64)
            /* duplicate the low word so a 64-bit rotate matches rotlw */
            tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
            tcg_gen_rotli_i64(t_ra, t_ra, sh);
            tcg_gen_andi_i64(t_ra, t_ra, mask);
#else
            g_assert_not_reached();
#endif
        }
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rlwnm & rlwnm.
*/
/* Rotate-left-word by rB (mod 32) then AND with mask mb..me. */
static void gen_rlwnm(DisasContext *ctx)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    target_ulong mask;
    bool mask_in_32b = true;

#if defined(TARGET_PPC64)
    /* MASK() takes 64-bit bit numbers; word masks live in bits 32..63 */
    mb += 32;
    me += 32;
#endif
    mask = MASK(mb, me);

#if defined(TARGET_PPC64)
    if (mask > 0xffffffffu) {
        /* wrapped (mb > me) mask: needs the 64-bit path below */
        mask_in_32b = false;
    }
#endif
    if (mask_in_32b) {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(t0, t_rb);
        tcg_gen_trunc_tl_i32(t1, t_rs);
        tcg_gen_andi_i32(t0, t0, 0x1f);
        tcg_gen_rotl_i32(t1, t1, t0);
        tcg_gen_extu_i32_tl(t_ra, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    } else {
#if defined(TARGET_PPC64)
        TCGv_i64 t0 = tcg_temp_new_i64();
        tcg_gen_andi_i64(t0, t_rb, 0x1f);
        /* duplicate the low word so a 64-bit rotate matches rotlw */
        tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32);
        tcg_gen_rotl_i64(t_ra, t_ra, t0);
        tcg_temp_free_i64(t0);
#else
        g_assert_not_reached();
#endif
    }

    tcg_gen_andi_tl(t_ra, t_ra, mask);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

#if defined(TARGET_PPC64)
/* Expand the two opcode-table entry points for a 1-extra-bit MD/MDS form. */
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0);                                                       \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1);                                                       \
}
/* Expand the four opcode-table entry points for a 2-extra-bit MD form. */
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
static void glue(gen_, name##0)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##1)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 0, 1);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##2)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 0);                                                    \
}                                                                             \
                                                                              \
static void glue(gen_, name##3)(DisasContext *ctx)                            \
{                                                                             \
    gen_##name(ctx, 1, 1);                                                    \
}

/* Common expander for rotate-left-doubleword-immediate-and-mask forms. */
static void gen_rldinm(DisasContext *ctx, int mb, int me, int sh)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    int len = me - mb + 1;
    int rsh = (64 - sh) & 63;

    if (sh != 0 && len > 0 && me == (63 - sh)) {
        /* shift-left-and-clear: zero-extending deposit */
        tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len);
    } else if (me == 63 && rsh + len <= 64) {
        /* right-justified field: plain extract */
        tcg_gen_extract_tl(t_ra, t_rs, rsh, len);
    } else {
        tcg_gen_rotli_tl(t_ra, t_rs, sh);
        tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldicl - rldicl. */
static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    /* mbn/shn are the high (split-field) bits of the 6-bit MB/SH values */
    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63, sh);
}
GEN_PPC64_R4(rldicl, 0x1E, 0x00);

/* rldicr - rldicr. */
static inline void gen_rldicr(DisasContext *ctx, int men, int shn)
{
    uint32_t sh, me;

    sh = SH(ctx->opcode) | (shn << 5);
    /* the MD-form ME field is encoded in the MB bit positions */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldinm(ctx, 0, me, sh);
}
GEN_PPC64_R4(rldicr, 0x1E, 0x02);

/* rldic - rldic.
*/
static inline void gen_rldic(DisasContext *ctx, int mbn, int shn)
{
    uint32_t sh, mb;

    /* mbn/shn are the high (split-field) bits of the 6-bit MB/SH values */
    sh = SH(ctx->opcode) | (shn << 5);
    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldinm(ctx, mb, 63 - sh, sh);
}
GEN_PPC64_R4(rldic, 0x1E, 0x04);

/* Common expander for rotate-left-doubleword-by-register-and-mask forms. */
static void gen_rldnm(DisasContext *ctx, int mb, int me)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    TCGv t_rb = cpu_gpr[rB(ctx->opcode)];
    TCGv t0;

    t0 = tcg_temp_new();
    /* rotate amount is rB modulo 64 */
    tcg_gen_andi_tl(t0, t_rb, 0x3f);
    tcg_gen_rotl_tl(t_ra, t_rs, t0);
    tcg_temp_free(t0);

    tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me));
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}

/* rldcl - rldcl. */
static inline void gen_rldcl(DisasContext *ctx, int mbn)
{
    uint32_t mb;

    mb = MB(ctx->opcode) | (mbn << 5);
    gen_rldnm(ctx, mb, 63);
}
GEN_PPC64_R2(rldcl, 0x1E, 0x08);

/* rldcr - rldcr. */
static inline void gen_rldcr(DisasContext *ctx, int men)
{
    uint32_t me;

    /* the MDS-form ME field is encoded in the MB bit positions */
    me = MB(ctx->opcode) | (men << 5);
    gen_rldnm(ctx, 0, me);
}
GEN_PPC64_R2(rldcr, 0x1E, 0x09);

/* rldimi - rldimi.
*/
/* Rotate-left-doubleword then insert under mask mb..(63-sh) into rA. */
static void gen_rldimi(DisasContext *ctx, int mbn, int shn)
{
    TCGv t_ra = cpu_gpr[rA(ctx->opcode)];
    TCGv t_rs = cpu_gpr[rS(ctx->opcode)];
    uint32_t sh = SH(ctx->opcode) | (shn << 5);
    uint32_t mb = MB(ctx->opcode) | (mbn << 5);
    uint32_t me = 63 - sh;

    if (mb <= me) {
        /* the mask selects exactly the shifted-in field: plain deposit */
        tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1);
    } else {
        target_ulong mask = MASK(mb, me);
        TCGv t1 = tcg_temp_new();

        tcg_gen_rotli_tl(t1, t_rs, sh);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_andi_tl(t_ra, t_ra, ~mask);
        tcg_gen_or_tl(t_ra, t_ra, t1);
        tcg_temp_free(t1);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, t_ra);
    }
}
GEN_PPC64_R4(rldimi, 0x1E, 0x06);
#endif

/*** Integer shift ***/

/* slw & slw. */
static void gen_slw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* shift bit 5 of rB into the sign, then replicate it over the mask */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    /* the result is a 32-bit quantity: clear the high word */
    tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sraw & sraw. */
/* Helper call: sraw also has to compute CA, done in C. */
static void gen_sraw(DisasContext *ctx)
{
    gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* srawi & srawi.
*/
/*
 * Shift-right-algebraic-word immediate.  CA is set when the source is
 * negative and any 1-bits are shifted out (i.e. the result rounded).
 */
static void gen_srawi(DisasContext *ctx)
{
    int sh = SH(ctx->opcode);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* shift by zero only sign-extends; CA is always clear */
        tcg_gen_ext32s_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        tcg_gen_ext32s_tl(dst, src);
        /* collect the bits that will be shifted out ... */
        tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        /* ... and keep them only when the (sign-extended) source is < 0 */
        tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, dst, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

/* srw & srw. */
static void gen_srw(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x20 */
#if defined(TARGET_PPC64)
    /* shift bit 5 of rB into the sign, then replicate it over the mask */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a);
    tcg_gen_sari_tl(t0, t0, 0x3f);
#else
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a);
    tcg_gen_sari_tl(t0, t0, 0x1f);
#endif
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    /* logical word shift: operate on the zero-extended low word */
    tcg_gen_ext32u_tl(t0, t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

#if defined(TARGET_PPC64)
/* sld & sld.
*/
static void gen_sld(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    /* shift bit 6 of rB into the sign, then replicate it over the mask */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* srad & srad. */
/* Helper call: srad also has to compute CA, done in C. */
static void gen_srad(DisasContext *ctx)
{
    gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env,
                    cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
/* sradi & sradi. */
/* n is the split high bit of the 6-bit shift count. */
static inline void gen_sradi(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];
    if (sh == 0) {
        /* shift by zero is a move; CA is always clear */
        tcg_gen_mov_tl(dst, src);
        tcg_gen_movi_tl(cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_movi_tl(cpu_ca32, 0);
        }
    } else {
        TCGv t0;
        /* CA = source negative AND any 1-bits shifted out */
        tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1);
        t0 = tcg_temp_new();
        tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1);
        tcg_gen_and_tl(cpu_ca, cpu_ca, t0);
        tcg_temp_free(t0);
        tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ca32, cpu_ca);
        }
        tcg_gen_sari_tl(dst, src, sh);
    }
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

static void gen_sradi0(DisasContext *ctx)
{
    gen_sradi(ctx, 0);
}

static void gen_sradi1(DisasContext *ctx)
{
    gen_sradi(ctx, 1);
}

/* extswsli & extswsli.
*/
/* Sign-extend the low word of rS, then shift left by sh (split field n). */
static inline void gen_extswsli(DisasContext *ctx, int n)
{
    int sh = SH(ctx->opcode) + (n << 5);
    TCGv dst = cpu_gpr[rA(ctx->opcode)];
    TCGv src = cpu_gpr[rS(ctx->opcode)];

    tcg_gen_ext32s_tl(dst, src);
    tcg_gen_shli_tl(dst, dst, sh);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, dst);
    }
}

static void gen_extswsli0(DisasContext *ctx)
{
    gen_extswsli(ctx, 0);
}

static void gen_extswsli1(DisasContext *ctx)
{
    gen_extswsli(ctx, 1);
}

/* srd & srd. */
static void gen_srd(DisasContext *ctx)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    /* AND rS with a mask that is 0 when rB >= 0x40 */
    /* shift bit 6 of rB into the sign, then replicate it over the mask */
    tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39);
    tcg_gen_sari_tl(t0, t0, 0x3f);
    tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f);
    tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}
#endif

/*** Addressing modes ***/
/* Register indirect with immediate index : EA = (rA|0) + SIMM */
static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA,
                                      target_long maskl)
{
    target_long simm = SIMM(ctx->opcode);

    /* maskl clears low displacement bits for DS/DQ-form instructions */
    simm &= ~maskl;
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            simm = (uint32_t)simm;
        }
        tcg_gen_movi_tl(EA, simm);
    } else if (likely(simm != 0)) {
        tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm);
        if (NARROW_MODE(ctx)) {
            /* 32-bit mode: effective addresses wrap at 4 GiB */
            tcg_gen_ext32u_tl(EA, EA);
        }
    } else {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
        }
    }
}

/* Register indirect with index : EA = (rA|0) + rB */
static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        } else {
            tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]);
        }
    } else {
        tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
        if (NARROW_MODE(ctx)) {
            tcg_gen_ext32u_tl(EA, EA);
        }
    }
}

/* Register indirect : EA = (rA|0) */
static inline void gen_addr_register(DisasContext *ctx, TCGv EA)
{
    if (rA(ctx->opcode) == 0) {
        tcg_gen_movi_tl(EA, 0);
    } else if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    } else {
        tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* ret = arg1 + val, truncated to 32 bits in narrow mode. */
static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                target_long val)
{
    tcg_gen_addi_tl(ret, arg1, val);
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ret, ret);
    }
}

/* Raise an alignment interrupt for ops not supported in little-endian mode. */
static inline void gen_align_no_le(DisasContext *ctx)
{
    gen_exception_err(ctx, POWERPC_EXCP_ALIGN,
                      (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE);
}

/* EA = (ra ? gpr[ra] : 0) + displ; returns a new temp the caller frees. */
static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ)
{
    TCGv ea = tcg_temp_new();
    if (ra) {
        tcg_gen_add_tl(ea, cpu_gpr[ra], displ);
    } else {
        tcg_gen_mov_tl(ea, displ);
    }
    if (NARROW_MODE(ctx)) {
        tcg_gen_ext32u_tl(ea, ea);
    }
    return ea;
}

/*** Integer load ***/
/* Apply the guest's default byte order to a memop ... */
#define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask)
/* ... or the opposite of it, for the byte-reverse instructions */
#define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP))

/* Define gen_qemu_<ldop>(): target_ulong-sized guest load. */
#define GEN_QEMU_LOAD_TL(ldop, op)                                       \
static void glue(gen_qemu_, ldop)(DisasContext *ctx,                     \
                                  TCGv val,                              \
                                  TCGv addr)                             \
{                                                                        \
    tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op);                     \
}

GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW))
GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL))

/* byte-reversed variants */
GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW))
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL))

/* Define gen_qemu_<ldop>_i64(): 64-bit guest load. */
#define GEN_QEMU_LOAD_64(ldop, op)                                       \
static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx,         \
                                              TCGv_i64 val,              \
                                              TCGv addr)                 \
{                                                                        \
    tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB))
GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW))
GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL))
GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL))
GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ))
#endif

/* Define gen_qemu_<stop>(): target_ulong-sized guest store. */
#define GEN_QEMU_STORE_TL(stop, op)                                      \
static void glue(gen_qemu_, stop)(DisasContext *ctx,                     \
                                  TCGv val,                              \
                                  TCGv addr)                             \
{                                                                        \
    tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op);                     \
}

#if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY)
GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB))
#endif
GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL))

/* byte-reversed variants */
GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW))
GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL))

/* Define gen_qemu_<stop>_i64(): 64-bit guest store. */
#define GEN_QEMU_STORE_64(stop, op)                                      \
static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx,         \
                                              TCGv_i64 val,              \
                                              TCGv addr)                 \
{                                                                        \
    tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op);                    \
}

GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB))
GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW))
GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL))
GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ))

#if defined(TARGET_PPC64)
GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ))
#endif

/* Define gen_<name>x(): X-form indexed load into rD, after privilege chk. */
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)              \
static void glue(gen_, name##x)(DisasContext *ctx)                       \
{                                                                        \
    TCGv EA;                                                             \
    chk;                                                                 \
    gen_set_access_type(ctx, ACCESS_INT);                                \
    EA = tcg_temp_new();                                                 \
    gen_addr_reg_index(ctx, EA);                                         \
    gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA);                  \
    tcg_temp_free(EA);                                                   \
}

#define GEN_LDX(name, ldop, opc2, opc3, type)                            \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* hypervisor-real-mode only (cache-inhibited accesses) */
#define GEN_LDX_HVRM(name, ldop, opc2, opc3, type)                       \
    GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

/* Define gen_<name>epx(): external-PID load, supervisor only. */
#define GEN_LDEPX(name, ldop, opc2, opc3)                                \
static void glue(gen_, name##epx)(DisasContext *ctx)                     \
{                                                                        \
    TCGv EA;                                                             \
    CHK_SV;                                                              \
    gen_set_access_type(ctx, ACCESS_INT);                                \
    EA = tcg_temp_new();                                                 \
    gen_addr_reg_index(ctx, EA);                                         \
    tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\
    tcg_temp_free(EA);                                                   \
}

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
#endif

#if defined(TARGET_PPC64)
/* CI load/store variants */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif

/*** Integer store ***/
/* Define gen_<name>x(): X-form indexed store of rS, after privilege chk. */
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)              \
static void glue(gen_, name##x)(DisasContext *ctx)                       \
{                                                                        \
    TCGv EA;                                                             \
    chk;                                                                 \
    gen_set_access_type(ctx, ACCESS_INT);                                \
    EA = tcg_temp_new();                                                 \
    gen_addr_reg_index(ctx, EA);                                         \
    gen_qemu_##stop(ctx, cpu_gpr[rS(ctx->opcode)], EA);                  \
    tcg_temp_free(EA);                                                   \
}
#define GEN_STX(name, stop, opc2, opc3, type)                            \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)

/* hypervisor-real-mode only (cache-inhibited accesses) */
#define GEN_STX_HVRM(name, stop, opc2, opc3, type)                       \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

/* Define gen_<name>epx(): external-PID store, supervisor only.
 * Note rD() and rS() name the same opcode field for these forms. */
#define GEN_STEPX(name, stop, opc2, opc3)                                \
static void glue(gen_, name##epx)(DisasContext *ctx)                     \
{                                                                        \
    TCGv EA;                                                             \
    CHK_SV;                                                              \
    gen_set_access_type(ctx, ACCESS_INT);                                \
    EA = tcg_temp_new();                                                 \
    gen_addr_reg_index(ctx, EA);                                         \
    tcg_gen_qemu_st_tl(                                                  \
        cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop);         \
    tcg_temp_free(EA);                                                   \
}

GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
#if defined(TARGET_PPC64)
GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
#endif

#if defined(TARGET_PPC64)
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
/*** Integer load and store with byte reverse ***/

/* lhbrx */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif /* TARGET_PPC64 */

/* sthbrx */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);

/*** Integer load and store multiple ***/

/* lmw */
static void gen_lmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    /* lmw is invalid in little-endian mode */
    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/* stmw: store GPRs rS..r31 to consecutive words; helper-based, BE only. */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    /* stmw is invalid in little-endian mode */
    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/*** Integer load and store strings ***/

/* lswi */
/*
 * PowerPC32 specification says we must generate an exception if rA is
 * in the range of registers to be loaded. On the other hand, IBM says
 * this is valid, but rA won't be loaded. For now, I'll follow the
 * spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);      /* byte count; 0 encodes 32 */
    int start = rD(ctx->opcode);   /* first register to load */
    int ra = rA(ctx->opcode);
    int nr;                        /* number of registers touched */

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    if (nb == 0) {
        nb = 32;
    }
    nr = DIV_ROUND_UP(nb, 4);
    /* Reject the form where rA would be overwritten by the load */
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* lswx: load string indexed; byte count comes from XER, checked in helper. */
static void gen_lswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2, t3;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_const_i32(rD(ctx->opcode));
    t2 = tcg_const_i32(rA(ctx->opcode));
    t3 = tcg_const_i32(rB(ctx->opcode));
    gen_helper_lswx(cpu_env,
                    t0, t1, t2, t3);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
}

/* stswi: store string immediate; byte count from the NB field (0 => 32). */
static void gen_stswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    if (nb == 0) {
        nb = 32;
    }
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* stswx: store string indexed; byte count taken from the low 7 bits of XER. */
static void gen_stswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/*** Memory synchronisation ***/
/* eieio */
static void gen_eieio(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_LD_ST;

    /*
     * POWER9 has an eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * ISA says that "Reserved fields in instructions are ignored
         * by the processor". So ignore the bit 6 on non-POWER9 CPU but
         * as this is not an instruction software should be using,
         * complain to the user.
         */
        if (!(ctx->insns_flags2 & PPC2_ISA300)) {
            qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @"
                          TARGET_FMT_lx "\n", ctx->cia);
        } else {
            bar = TCG_MO_ST_LD;
        }
    }

    tcg_gen_mb(bar | TCG_BAR_SC);
}

#if !defined(CONFIG_USER_ONLY)
/*
 * Emit a runtime check of env->tlb_need_flush and call the appropriate
 * flush helper if it is set.  Only emitted when the CPU model uses lazy
 * TLB flushing (ctx->lazy_tlb_flush).
 */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global)
{
    TCGv_i32 t;
    TCGLabel *l;

    if (!ctx->lazy_tlb_flush) {
        return;
    }
    l = gen_new_label();
    t = tcg_temp_new_i32();
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush));
    /* Nothing pending: skip the helper call */
    tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l);
    if (global) {
        gen_helper_check_tlb_flush_global(cpu_env);
    } else {
        gen_helper_check_tlb_flush_local(cpu_env);
    }
    gen_set_label(l);
    tcg_temp_free_i32(t);
}
#else
/* User mode has no TLB to flush. */
static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { }
#endif

/* isync */
static void gen_isync(DisasContext *ctx)
{
    /*
     * We need to check for a pending TLB flush.
     * This can only happen in
     * kernel mode however so check MSR_PR
     */
    if (!ctx->pr) {
        gen_check_tlb_flush(ctx, false);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
    /* isync is a context-synchronising instruction: end the TB */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

/* Access size in bytes encoded in a MemOp */
#define MEMOP_GET_SIZE(x)  (1 << ((x) & MO_SIZE))

/*
 * Common code for the load-and-reserve instructions (lbarx/lharx/
 * lwarx/ldarx): load RD and record the reservation address and the
 * loaded value for the matching st*cx. to compare against.
 */
static void gen_load_locked(DisasContext *ctx, MemOp memop)
{
    TCGv gpr = cpu_gpr[rD(ctx->opcode)];
    TCGv t0 = tcg_temp_new();

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN);
    tcg_gen_mov_tl(cpu_reserve, t0);
    tcg_gen_mov_tl(cpu_reserve_val, gpr);
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ);
    tcg_temp_free(t0);
}

#define LARX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_load_locked(ctx, memop);           \
}

/* lbarx lharx lwarx */
LARX(lbarx, DEF_MEMOP(MO_UB))
LARX(lharx, DEF_MEMOP(MO_UW))
LARX(lwarx, DEF_MEMOP(MO_UL))

/*
 * Non-parallel implementation of the "fetch and increment/decrement
 * bounded/equal" atomic operations: loads the value at EA and the next
 * element, conditionally stores value+addend back, and sets RT.
 */
static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop,
                                      TCGv EA, TCGCond cond, int addend)
{
    TCGv t = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv u = tcg_temp_new();

    tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
    tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop));
    tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop);
    tcg_gen_addi_tl(u, t, addend);

    /* E.g. for fetch and increment bounded... */
    /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */
    tcg_gen_movcond_tl(cond, u, t, t2, u, t);
    tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop);

    /* RT = (t != t2 ?
t : u = 1<<(s*8-1)) */ 3693 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3694 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3695 3696 tcg_temp_free(t); 3697 tcg_temp_free(t2); 3698 tcg_temp_free(u); 3699 } 3700 3701 static void gen_ld_atomic(DisasContext *ctx, MemOp memop) 3702 { 3703 uint32_t gpr_FC = FC(ctx->opcode); 3704 TCGv EA = tcg_temp_new(); 3705 int rt = rD(ctx->opcode); 3706 bool need_serial; 3707 TCGv src, dst; 3708 3709 gen_addr_register(ctx, EA); 3710 dst = cpu_gpr[rt]; 3711 src = cpu_gpr[(rt + 1) & 31]; 3712 3713 need_serial = false; 3714 memop |= MO_ALIGN; 3715 switch (gpr_FC) { 3716 case 0: /* Fetch and add */ 3717 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3718 break; 3719 case 1: /* Fetch and xor */ 3720 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3721 break; 3722 case 2: /* Fetch and or */ 3723 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3724 break; 3725 case 3: /* Fetch and 'and' */ 3726 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3727 break; 3728 case 4: /* Fetch and max unsigned */ 3729 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3730 break; 3731 case 5: /* Fetch and max signed */ 3732 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3733 break; 3734 case 6: /* Fetch and min unsigned */ 3735 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3736 break; 3737 case 7: /* Fetch and min signed */ 3738 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3739 break; 3740 case 8: /* Swap */ 3741 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3742 break; 3743 3744 case 16: /* Compare and swap not equal */ 3745 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3746 need_serial = true; 3747 } else { 3748 TCGv t0 = tcg_temp_new(); 3749 TCGv t1 = tcg_temp_new(); 3750 3751 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3752 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS 
== 32) {
                tcg_gen_mov_tl(t1, src);
            } else {
                /* Sub-word compare: only the low 32 bits of RT+1 count */
                tcg_gen_ext32u_tl(t1, src);
            }
            /* If mem != RT+1, store RT+2; RT receives the old value */
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1,
                               cpu_gpr[(rt + 2) & 31], t0);
            tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop);
            tcg_gen_mov_tl(dst, t0);

            tcg_temp_free(t0);
            tcg_temp_free(t1);
        }
        break;

    case 24: /* Fetch and increment bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1);
        }
        break;
    case 25: /* Fetch and increment equal */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1);
        }
        break;
    case 28: /* Fetch and decrement bounded */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            need_serial = true;
        } else {
            gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1);
        }
        break;

    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(EA);

    if (need_serial) {
        /* Restart with exclusive lock.
         */
        gen_helper_exit_atomic(cpu_env);
        ctx->base.is_jmp = DISAS_NORETURN;
    }
}

/* lwat */
static void gen_lwat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
/* ldat */
static void gen_ldat(DisasContext *ctx)
{
    gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif

/*
 * stwat/stdat (ISA 3.0 atomic memory operations, store forms):
 * dispatch on the FC function code.  The fetched-back value is
 * discarded; only the memory update is architecturally visible.
 */
static void gen_st_atomic(DisasContext *ctx, MemOp memop)
{
    uint32_t gpr_FC = FC(ctx->opcode);
    TCGv EA = tcg_temp_new();
    TCGv src, discard;

    gen_addr_register(ctx, EA);
    src = cpu_gpr[rD(ctx->opcode)];
    discard = tcg_temp_new();

    memop |= MO_ALIGN;
    switch (gpr_FC) {
    case 0: /* add and Store */
        tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 1: /* xor and Store */
        tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 2: /* Or and Store */
        tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 3: /* 'and' and Store */
        tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 4: /* Store max unsigned */
        tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 5: /* Store max signed */
        tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 6: /* Store min unsigned */
        tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 7: /* Store min signed */
        tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop);
        break;
    case 24: /* Store twin */
        if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
            /* Restart with exclusive lock.
             */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        } else {
            /*
             * Store twin: if the two adjacent elements are equal,
             * overwrite both with src; otherwise leave memory alone.
             */
            TCGv t = tcg_temp_new();
            TCGv t2 = tcg_temp_new();
            TCGv s = tcg_temp_new();
            TCGv s2 = tcg_temp_new();
            TCGv ea_plus_s = tcg_temp_new();

            tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop);
            tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop));
            tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop);
            tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t);
            tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2);
            tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop);
            tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop);

            tcg_temp_free(ea_plus_s);
            tcg_temp_free(s2);
            tcg_temp_free(s);
            tcg_temp_free(t2);
            tcg_temp_free(t);
        }
        break;
    default:
        /* invoke data storage error handler */
        gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL);
    }
    tcg_temp_free(discard);
    tcg_temp_free(EA);
}

/* stwat */
static void gen_stwat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UL));
}

#ifdef TARGET_PPC64
/* stdat */
static void gen_stdat(DisasContext *ctx)
{
    gen_st_atomic(ctx, DEF_MEMOP(MO_UQ));
}
#endif

/*
 * Common code for the store-conditional instructions (stbcx./sthcx./
 * stwcx./stdcx.): succeed only if the address matches the reservation
 * and memory still holds the reserved value; CR0 reports the outcome.
 */
static void gen_conditional_store(DisasContext *ctx, MemOp memop)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    int reg = rS(ctx->opcode);

    gen_set_access_type(ctx, ACCESS_RES);
    gen_addr_reg_index(ctx, t0);
    /* Wrong address: the store-conditional fails outright */
    tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1);
    tcg_temp_free(t0);

    t0 = tcg_temp_new();
    tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val,
                              cpu_gpr[reg], ctx->mem_idx,
                              DEF_MEMOP(memop) | MO_ALIGN);
    /* CR0 = EQ (success) | SO */
    tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val);
    tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT);
    tcg_gen_or_tl(t0, t0, cpu_so);
    tcg_gen_trunc_tl_i32(cpu_crf[0], t0);
    tcg_temp_free(t0);
    tcg_gen_br(l2);

    gen_set_label(l1);

    /*
     * Address mismatch implies failure. But we still need to provide
     * the memory barrier semantics of the instruction.
     */
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL);
    tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

    gen_set_label(l2);
    /* The reservation is consumed either way */
    tcg_gen_movi_tl(cpu_reserve, -1);
}

#define STCX(name, memop)                  \
static void gen_##name(DisasContext *ctx)  \
{                                          \
    gen_conditional_store(ctx, memop);     \
}

STCX(stbcx_, DEF_MEMOP(MO_UB))
STCX(sthcx_, DEF_MEMOP(MO_UW))
STCX(stwcx_, DEF_MEMOP(MO_UL))

#if defined(TARGET_PPC64)
/* ldarx */
LARX(ldarx, DEF_MEMOP(MO_UQ))
/* stdcx. */
STCX(stdcx_, DEF_MEMOP(MO_UQ))

/* lqarx: quadword load-and-reserve into the even/odd pair RD,RD+1. */
static void gen_lqarx(DisasContext *ctx)
{
    int rd = rD(ctx->opcode);
    TCGv EA, hi, lo;

    /* RD must be even and must not overlap RA/RB */
    if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) ||
                 (rd == rB(ctx->opcode)))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RD+1, even in LE mode. */
    lo = cpu_gpr[rd + 1];
    hi = cpu_gpr[rd];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_ATOMIC128) {
            TCGv_i32 oi = tcg_temp_new_i32();
            if (ctx->le_mode) {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_le_parallel(lo, cpu_env, EA, oi);
            } else {
                tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN,
                                                    ctx->mem_idx));
                gen_helper_lq_be_parallel(lo, cpu_env, EA, oi);
            }
            tcg_temp_free_i32(oi);
            /* The helper returns the high half via env->retxh */
            tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh));
        } else {
            /* Restart with exclusive lock.
             */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
            tcg_temp_free(EA);
            return;
        }
    } else if (ctx->le_mode) {
        /* Serial context: two 8-byte loads, low half first in LE */
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ);
    } else {
        tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16);
        tcg_gen_mov_tl(cpu_reserve, EA);
        gen_addr_add(ctx, EA, EA, 8);
        tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ);
    }
    tcg_temp_free(EA);

    /* Record both halves for the stqcx. comparison */
    tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val));
    tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2));
}

/* stqcx. */
static void gen_stqcx_(DisasContext *ctx)
{
    int rs = rS(ctx->opcode);
    TCGv EA, hi, lo;

    /* RS must be even */
    if (unlikely(rs & 1)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }

    gen_set_access_type(ctx, ACCESS_RES);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);

    /* Note that the low part is always in RS+1, even in LE mode. */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock.
             */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        /* Serial context: open-coded compare of both reserved halves */
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        /* Failure: CR0 = SO only */
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode however so
     * check MSR_PR as well.
     */
    if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) {
        gen_check_tlb_flush(ctx, true);
    }
    tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC);
}

/* wait: halt the CPU until an interrupt arrives. */
static void gen_wait(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(1);
    /* Set cs->halted directly; env is embedded in PowerPCCPU after cs */
    tcg_gen_st_i32(t0, cpu_env,
                   -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted));
    tcg_temp_free_i32(t0);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
}

#if defined(TARGET_PPC64)
/* doze: enter the lightest power-saving mode (HV only). */
static void gen_doze(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_DOZE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* nap (HV only). */
static void gen_nap(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_NAP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* stop (HV only). */
static void gen_stop(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_STOP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* sleep (HV only). */
static void gen_sleep(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_SLEEP);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rvwinkle: deepest power-saving mode (HV only). */
static void gen_rvwinkle(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv_i32 t;

    CHK_HV;
    t = tcg_const_i32(PPC_PM_RVWINKLE);
    gen_helper_pminsn(cpu_env, t);
    tcg_temp_free_i32(t);
    /* Stop translation, as the CPU is supposed to sleep from now */
    gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next);
#endif /* defined(CONFIG_USER_ONLY) */
}
#endif /* #if defined(TARGET_PPC64) */

/* Record nip in CFAR, for CPUs that implement it (64-bit book3s). */
static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip)
{
#if defined(TARGET_PPC64)
    if (ctx->has_cfar) {
        tcg_gen_movi_tl(cpu_cfar, nip);
    }
#endif
}

#if defined(TARGET_PPC64)
/* Account the instructions of this TB to the PMU instruction counters. */
static void pmu_count_insns(DisasContext *ctx)
{
    /*
     * Do not bother calling the helper if the PMU isn't counting
     * instructions.
     */
    if (!ctx->pmu_insn_cnt) {
        return;
    }

 #if !defined(CONFIG_USER_ONLY)
    /*
     * The PMU insns_inc() helper stops the internal PMU timer if a
     * counter overflow happens. In that case, if the guest is
     * running with icount and we do not handle it beforehand,
     * the helper can trigger a 'bad icount read'.
     */
    gen_icount_io_start(ctx);

    gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns));
#else
    /*
     * User mode can read (but not write) PMC5 and start/stop
     * the PMU via MMCR0_FC. In this case just increment
     * PMC5 with base.num_insns.
     */
    TCGv t0 = tcg_temp_new();

    gen_load_spr(t0, SPR_POWER_PMC5);
    tcg_gen_addi_tl(t0, t0, ctx->base.num_insns);
    gen_store_spr(SPR_POWER_PMC5, t0);

    tcg_temp_free(t0);
#endif /* #if !defined(CONFIG_USER_ONLY) */
}
#else
/* No PMU instruction counting on 32-bit targets. */
static void pmu_count_insns(DisasContext *ctx)
{
    return;
}
#endif /* #if defined(TARGET_PPC64) */

static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
{
    return translator_use_goto_tb(&ctx->base, dest);
}

/* Indirect jump: count insns first when goto_ptr would exit the TB. */
static void gen_lookup_and_goto_ptr(DisasContext *ctx)
{
    if (unlikely(ctx->singlestep_enabled)) {
        gen_debug_exception(ctx);
    } else {
        /*
         * tcg_gen_lookup_and_goto_ptr will exit the TB if
         * CF_NO_GOTO_PTR is set. Count insns now.
         */
        /*
         * NOTE(review): CF_NO_GOTO_PTR is a cflags bit, but it is
         * tested here against tb->flags — verify whether this should
         * be tb_cflags(ctx->base.tb).
         */
        if (ctx->base.tb->flags & CF_NO_GOTO_PTR) {
            pmu_count_insns(ctx);
        }

        tcg_gen_lookup_and_goto_ptr();
    }
}

/*** Branch ***/
/* Direct jump to dest, chaining TBs when allowed. */
static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    if (NARROW_MODE(ctx)) {
        dest = (uint32_t) dest;
    }
    if (use_goto_tb(ctx, dest)) {
        pmu_count_insns(ctx);
        tcg_gen_goto_tb(n);
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        tcg_gen_exit_tb(ctx->base.tb, n);
    } else {
        tcg_gen_movi_tl(cpu_nip, dest & ~3);
        gen_lookup_and_goto_ptr(ctx);
    }
}

/* Set LR to the return address (truncated to 32 bits in narrow mode). */
static inline void gen_setlr(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_lr, nip);
}

/* b ba bl bla */
static void gen_b(DisasContext *ctx)
{
    target_ulong li, target;

    /* sign extend LI */
    li = LI(ctx->opcode);
    li = (li ^ 0x02000000) - 0x02000000;
    if (likely(AA(ctx->opcode) == 0)) {
        target = ctx->cia + li;
    } else {
        target = li;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    gen_update_cfar(ctx, ctx->cia);
    gen_goto_tb(ctx, 0, target);
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* Branch target selectors for gen_bcond() */
#define BCOND_IM  0
#define BCOND_LR  1
#define BCOND_CTR 2
#define BCOND_TAR 3

/*
 * Common code for all conditional branches (bc/bclr/bcctr/bctar):
 * evaluates the BO/BI conditions and emits the taken and fallthrough
 * paths.
 */
static void gen_bcond(DisasContext *ctx, int type)
{
    uint32_t bo = BO(ctx->opcode);
    TCGLabel *l1;
    TCGv target;

    if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) {
        /* Snapshot the target register before LR is possibly updated */
        target = tcg_temp_local_new();
        if (type == BCOND_CTR) {
            tcg_gen_mov_tl(target, cpu_ctr);
        } else if (type == BCOND_TAR) {
            gen_load_spr(target, SPR_TAR);
        } else {
            tcg_gen_mov_tl(target, cpu_lr);
        }
    } else {
        target = NULL;
    }
    if (LK(ctx->opcode)) {
        gen_setlr(ctx, ctx->base.pc_next);
    }
    l1 = gen_new_label();
    if ((bo & 0x4) == 0) {
        /* Decrement and test CTR */
        TCGv temp = tcg_temp_new();

        if (type == BCOND_CTR) {
            /*
             * All ISAs up to v3 describe this form of bcctr as invalid but
             * some processors, ie. 64-bit server processors compliant with
             * arch 2.x, do implement a "test and decrement" logic instead,
             * as described in their respective UMs. This logic involves CTR
             * to act as both the branch target and a counter, which makes
             * it basically useless and thus never used in real code.
             *
             * This form was hence chosen to trigger extra micro-architectural
             * side-effect on real HW needed for the Spectre v2 workaround.
             * It is up to guests that implement such workaround, ie. linux, to
             * use this form in a way it just triggers the side-effect without
             * doing anything else harmful.
             */
            if (unlikely(!is_book3s_arch2x(ctx))) {
                gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
                tcg_temp_free(temp);
                tcg_temp_free(target);
                return;
            }

            /* bcctr form: test CTR first, then decrement */
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
        } else {
            /* Normal form: decrement CTR first, then test */
            tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1);
            if (NARROW_MODE(ctx)) {
                tcg_gen_ext32u_tl(temp, cpu_ctr);
            } else {
                tcg_gen_mov_tl(temp, cpu_ctr);
            }
            if (bo & 0x2) {
                tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1);
            } else {
                tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1);
            }
        }
        tcg_temp_free(temp);
    }
    if ((bo & 0x10) == 0) {
        /* Test CR */
        uint32_t bi = BI(ctx->opcode);
        uint32_t mask = 0x08 >> (bi & 0x03);
        TCGv_i32 temp = tcg_temp_new_i32();

        if (bo & 0x8) {
            /* Branch if the selected CR bit is set */
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1);
        } else {
            /* Branch if the selected CR bit is clear */
            tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask);
            tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1);
        }
        tcg_temp_free_i32(temp);
    }
    gen_update_cfar(ctx, ctx->cia);
    if (type == BCOND_IM) {
        /* Immediate displacement: static branch target */
        target_ulong li = (target_long)((int16_t)(BD(ctx->opcode)));
        if (likely(AA(ctx->opcode) == 0)) {
            gen_goto_tb(ctx, 0, ctx->cia + li);
        } else {
            gen_goto_tb(ctx, 0, li);
        }
    } else {
        /* Register target: mask the low bits and jump indirectly */
        if (NARROW_MODE(ctx)) {
            tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3);
        } else {
            tcg_gen_andi_tl(cpu_nip, target, ~3);
        }
        gen_lookup_and_goto_ptr(ctx);
        tcg_temp_free(target);
    }
    if ((bo & 0x14) != 0x14) {
        /* fallthrough case */
        gen_set_label(l1);
        gen_goto_tb(ctx, 1, ctx->base.pc_next);
    }
    ctx->base.is_jmp = DISAS_NORETURN;
}

/* bc */
static void gen_bc(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_IM);
}

/* bcctr */
static void gen_bcctr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_CTR);
}

/* bclr */
static void gen_bclr(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_LR);
}

/* bctar */
static void gen_bctar(DisasContext *ctx)
{
    gen_bcond(ctx, BCOND_TAR);
}

/*** Condition register logical ***/
/*
 * CR-bit logical ops: align the crbA and crbB bits to crbD's position
 * within their CR fields, apply tcg_op, then merge the single result
 * bit back into the destination CR field.
 */
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    uint8_t bitmask;                                                          \
    int sh;                                                                   \
    TCGv_i32 t0, t1;                                                          \
    sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03);             \
    t0 = tcg_temp_new_i32();                                                  \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]);                 \
    t1 = tcg_temp_new_i32();                                                  \
    sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03);             \
    if (sh > 0)                                                               \
        tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh);            \
    else if (sh < 0)                                                          \
        tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh);           \
    else                                                                      \
        tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]);                 \
    tcg_op(t0, t0, t1);                                                       \
    bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03);                             \
    tcg_gen_andi_i32(t0, t0, bitmask);                                        \
    tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask);          \
    tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1);                  \
    tcg_temp_free_i32(t0);                                                    \
    tcg_temp_free_i32(t1);                                                    \
}

/* crand */
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08);
/* crandc */
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04);
/* creqv */
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09);
/* crnand */
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07);
/* crnor */
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01);
/* cror */
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E);
/* crorc */
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D);
/* crxor */
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06);

/* mcrf: copy one CR field to another. */
static void gen_mcrf(DisasContext *ctx)
{
    tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]);
}

/*** System linkage ***/

/* rfi (supervisor only) */
static void gen_rfi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /*
     * This instruction doesn't exist anymore on 64-bit server
     * processors compliant with arch 2.x
     */
    if (is_book3s_arch2x(ctx)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        return;
    }
    /* Restore CPU state */
    CHK_SV;
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfi(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}

#if defined(TARGET_PPC64)
/* rfid (supervisor only) */
static void gen_rfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_SV;
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfid(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}

#if !defined(CONFIG_USER_ONLY)
/* rfscv (supervisor only): return from scv system call. */
static void gen_rfscv(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_SV;
    gen_icount_io_start(ctx);
    gen_update_cfar(ctx, ctx->cia);
    gen_helper_rfscv(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
#endif

/* hrfid (hypervisor only) */
static void gen_hrfid(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Restore CPU state */
    CHK_HV;
    gen_helper_hrfid(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif
}
#endif

/* sc */
#if defined(CONFIG_USER_ONLY)
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER
#else
#define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL
#define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED
#endif
static void gen_sc(DisasContext *ctx)
{
    uint32_t lev;

    /* LEV field selects the system-call level */
    lev = (ctx->opcode >> 5) & 0x7F;
    gen_exception_err(ctx, POWERPC_SYSCALL, lev);
}

#if defined(TARGET_PPC64)
#if !defined(CONFIG_USER_ONLY)
/* scv: vectored system call (ISA 3.0). */
static void gen_scv(DisasContext *ctx)
{
    uint32_t lev = (ctx->opcode >> 5) & 0x7F;

    /* Set the PC back to the faulting instruction. */
    gen_update_nip(ctx, ctx->cia);
    gen_helper_scv(cpu_env, tcg_constant_i32(lev));

    ctx->base.is_jmp = DISAS_NORETURN;
}
#endif
#endif

/*** Trap ***/

/* Check for unconditional traps (always or never) */
static bool check_unconditional_trap(DisasContext *ctx)
{
    /* Trap never */
    if (TO(ctx->opcode) == 0) {
        return true;
    }
    /* Trap always */
    if (TO(ctx->opcode) == 31) {
        gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP);
        return true;
    }
    return false;
}

/* tw */
static void gen_tw(DisasContext *ctx)
{
    TCGv_i32 t0;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)],
                  t0);
    tcg_temp_free_i32(t0);
}

/* twi */
static void gen_twi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (check_unconditional_trap(ctx)) {
        return;
    }
    t0 = tcg_const_tl(SIMM(ctx->opcode));
    t1 = tcg_const_i32(TO(ctx->opcode));
    gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

#if defined(TARGET_PPC64)
/* td */
static void gen_td(DisasContext *ctx)
{
    TCGv_i32 t0;

    if
(check_unconditional_trap(ctx)) { 4664 return; 4665 } 4666 t0 = tcg_const_i32(TO(ctx->opcode)); 4667 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4668 t0); 4669 tcg_temp_free_i32(t0); 4670 } 4671 4672 /* tdi */ 4673 static void gen_tdi(DisasContext *ctx) 4674 { 4675 TCGv t0; 4676 TCGv_i32 t1; 4677 4678 if (check_unconditional_trap(ctx)) { 4679 return; 4680 } 4681 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4682 t1 = tcg_const_i32(TO(ctx->opcode)); 4683 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4684 tcg_temp_free(t0); 4685 tcg_temp_free_i32(t1); 4686 } 4687 #endif 4688 4689 /*** Processor control ***/ 4690 4691 /* mcrxr */ 4692 static void gen_mcrxr(DisasContext *ctx) 4693 { 4694 TCGv_i32 t0 = tcg_temp_new_i32(); 4695 TCGv_i32 t1 = tcg_temp_new_i32(); 4696 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4697 4698 tcg_gen_trunc_tl_i32(t0, cpu_so); 4699 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4700 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4701 tcg_gen_shli_i32(t0, t0, 3); 4702 tcg_gen_shli_i32(t1, t1, 2); 4703 tcg_gen_shli_i32(dst, dst, 1); 4704 tcg_gen_or_i32(dst, dst, t0); 4705 tcg_gen_or_i32(dst, dst, t1); 4706 tcg_temp_free_i32(t0); 4707 tcg_temp_free_i32(t1); 4708 4709 tcg_gen_movi_tl(cpu_so, 0); 4710 tcg_gen_movi_tl(cpu_ov, 0); 4711 tcg_gen_movi_tl(cpu_ca, 0); 4712 } 4713 4714 #ifdef TARGET_PPC64 4715 /* mcrxrx */ 4716 static void gen_mcrxrx(DisasContext *ctx) 4717 { 4718 TCGv t0 = tcg_temp_new(); 4719 TCGv t1 = tcg_temp_new(); 4720 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4721 4722 /* copy OV and OV32 */ 4723 tcg_gen_shli_tl(t0, cpu_ov, 1); 4724 tcg_gen_or_tl(t0, t0, cpu_ov32); 4725 tcg_gen_shli_tl(t0, t0, 2); 4726 /* copy CA and CA32 */ 4727 tcg_gen_shli_tl(t1, cpu_ca, 1); 4728 tcg_gen_or_tl(t1, t1, cpu_ca32); 4729 tcg_gen_or_tl(t0, t0, t1); 4730 tcg_gen_trunc_tl_i32(dst, t0); 4731 tcg_temp_free(t0); 4732 tcg_temp_free(t1); 4733 } 4734 #endif 4735 4736 /* mfcr mfocrf */ 4737 static void gen_mfcr(DisasContext *ctx) 4738 { 4739 uint32_t 
crm, crn; 4740 4741 if (likely(ctx->opcode & 0x00100000)) { 4742 crm = CRM(ctx->opcode); 4743 if (likely(crm && ((crm & (crm - 1)) == 0))) { 4744 crn = ctz32(crm); 4745 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4746 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4747 cpu_gpr[rD(ctx->opcode)], crn * 4); 4748 } 4749 } else { 4750 TCGv_i32 t0 = tcg_temp_new_i32(); 4751 tcg_gen_mov_i32(t0, cpu_crf[0]); 4752 tcg_gen_shli_i32(t0, t0, 4); 4753 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4754 tcg_gen_shli_i32(t0, t0, 4); 4755 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4756 tcg_gen_shli_i32(t0, t0, 4); 4757 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4758 tcg_gen_shli_i32(t0, t0, 4); 4759 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4760 tcg_gen_shli_i32(t0, t0, 4); 4761 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4762 tcg_gen_shli_i32(t0, t0, 4); 4763 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4764 tcg_gen_shli_i32(t0, t0, 4); 4765 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4766 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4767 tcg_temp_free_i32(t0); 4768 } 4769 } 4770 4771 /* mfmsr */ 4772 static void gen_mfmsr(DisasContext *ctx) 4773 { 4774 CHK_SV; 4775 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4776 } 4777 4778 /* mfspr */ 4779 static inline void gen_op_mfspr(DisasContext *ctx) 4780 { 4781 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4782 uint32_t sprn = SPR(ctx->opcode); 4783 4784 #if defined(CONFIG_USER_ONLY) 4785 read_cb = ctx->spr_cb[sprn].uea_read; 4786 #else 4787 if (ctx->pr) { 4788 read_cb = ctx->spr_cb[sprn].uea_read; 4789 } else if (ctx->hv) { 4790 read_cb = ctx->spr_cb[sprn].hea_read; 4791 } else { 4792 read_cb = ctx->spr_cb[sprn].oea_read; 4793 } 4794 #endif 4795 if (likely(read_cb != NULL)) { 4796 if (likely(read_cb != SPR_NOACCESS)) { 4797 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4798 } else { 4799 /* Privilege exception */ 4800 /* 4801 * This is a hack to avoid warnings when running Linux: 4802 * this OS breaks the PowerPC virtualisation model, 4803 * allowing 
userland application to read the PVR 4804 */ 4805 if (sprn != SPR_PVR) { 4806 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4807 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4808 ctx->cia); 4809 } 4810 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4811 } 4812 } else { 4813 /* ISA 2.07 defines these as no-ops */ 4814 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4815 (sprn >= 808 && sprn <= 811)) { 4816 /* This is a nop */ 4817 return; 4818 } 4819 /* Not defined */ 4820 qemu_log_mask(LOG_GUEST_ERROR, 4821 "Trying to read invalid spr %d (0x%03x) at " 4822 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4823 4824 /* 4825 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4826 * generate a priv, a hv emu or a no-op 4827 */ 4828 if (sprn & 0x10) { 4829 if (ctx->pr) { 4830 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4831 } 4832 } else { 4833 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4834 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4835 } 4836 } 4837 } 4838 } 4839 4840 static void gen_mfspr(DisasContext *ctx) 4841 { 4842 gen_op_mfspr(ctx); 4843 } 4844 4845 /* mftb */ 4846 static void gen_mftb(DisasContext *ctx) 4847 { 4848 gen_op_mfspr(ctx); 4849 } 4850 4851 /* mtcrf mtocrf*/ 4852 static void gen_mtcrf(DisasContext *ctx) 4853 { 4854 uint32_t crm, crn; 4855 4856 crm = CRM(ctx->opcode); 4857 if (likely((ctx->opcode & 0x00100000))) { 4858 if (crm && ((crm & (crm - 1)) == 0)) { 4859 TCGv_i32 temp = tcg_temp_new_i32(); 4860 crn = ctz32(crm); 4861 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4862 tcg_gen_shri_i32(temp, temp, crn * 4); 4863 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4864 tcg_temp_free_i32(temp); 4865 } 4866 } else { 4867 TCGv_i32 temp = tcg_temp_new_i32(); 4868 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4869 for (crn = 0 ; crn < 8 ; crn++) { 4870 if (crm & (1 << crn)) { 4871 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4872 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - 
crn], 0xf); 4873 } 4874 } 4875 tcg_temp_free_i32(temp); 4876 } 4877 } 4878 4879 /* mtmsr */ 4880 #if defined(TARGET_PPC64) 4881 static void gen_mtmsrd(DisasContext *ctx) 4882 { 4883 if (unlikely(!is_book3s_arch2x(ctx))) { 4884 gen_invalid(ctx); 4885 return; 4886 } 4887 4888 CHK_SV; 4889 4890 #if !defined(CONFIG_USER_ONLY) 4891 TCGv t0, t1; 4892 target_ulong mask; 4893 4894 t0 = tcg_temp_new(); 4895 t1 = tcg_temp_new(); 4896 4897 gen_icount_io_start(ctx); 4898 4899 if (ctx->opcode & 0x00010000) { 4900 /* L=1 form only updates EE and RI */ 4901 mask = (1ULL << MSR_RI) | (1ULL << MSR_EE); 4902 } else { 4903 /* mtmsrd does not alter HV, S, ME, or LE */ 4904 mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) | 4905 (1ULL << MSR_HV)); 4906 /* 4907 * XXX: we need to update nip before the store if we enter 4908 * power saving mode, we will exit the loop directly from 4909 * ppc_store_msr 4910 */ 4911 gen_update_nip(ctx, ctx->base.pc_next); 4912 } 4913 4914 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 4915 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 4916 tcg_gen_or_tl(t0, t0, t1); 4917 4918 gen_helper_store_msr(cpu_env, t0); 4919 4920 /* Must stop the translation as machine state (may have) changed */ 4921 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4922 4923 tcg_temp_free(t0); 4924 tcg_temp_free(t1); 4925 #endif /* !defined(CONFIG_USER_ONLY) */ 4926 } 4927 #endif /* defined(TARGET_PPC64) */ 4928 4929 static void gen_mtmsr(DisasContext *ctx) 4930 { 4931 CHK_SV; 4932 4933 #if !defined(CONFIG_USER_ONLY) 4934 TCGv t0, t1; 4935 target_ulong mask = 0xFFFFFFFF; 4936 4937 t0 = tcg_temp_new(); 4938 t1 = tcg_temp_new(); 4939 4940 gen_icount_io_start(ctx); 4941 if (ctx->opcode & 0x00010000) { 4942 /* L=1 form only updates EE and RI */ 4943 mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE); 4944 } else { 4945 /* mtmsr does not alter S, ME, or LE */ 4946 mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S)); 4947 4948 /* 4949 * XXX: we need to update nip before the store if 
we enter 4950 * power saving mode, we will exit the loop directly from 4951 * ppc_store_msr 4952 */ 4953 gen_update_nip(ctx, ctx->base.pc_next); 4954 } 4955 4956 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 4957 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 4958 tcg_gen_or_tl(t0, t0, t1); 4959 4960 gen_helper_store_msr(cpu_env, t0); 4961 4962 /* Must stop the translation as machine state (may have) changed */ 4963 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4964 4965 tcg_temp_free(t0); 4966 tcg_temp_free(t1); 4967 #endif 4968 } 4969 4970 /* mtspr */ 4971 static void gen_mtspr(DisasContext *ctx) 4972 { 4973 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4974 uint32_t sprn = SPR(ctx->opcode); 4975 4976 #if defined(CONFIG_USER_ONLY) 4977 write_cb = ctx->spr_cb[sprn].uea_write; 4978 #else 4979 if (ctx->pr) { 4980 write_cb = ctx->spr_cb[sprn].uea_write; 4981 } else if (ctx->hv) { 4982 write_cb = ctx->spr_cb[sprn].hea_write; 4983 } else { 4984 write_cb = ctx->spr_cb[sprn].oea_write; 4985 } 4986 #endif 4987 if (likely(write_cb != NULL)) { 4988 if (likely(write_cb != SPR_NOACCESS)) { 4989 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4990 } else { 4991 /* Privilege exception */ 4992 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 4993 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4994 ctx->cia); 4995 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4996 } 4997 } else { 4998 /* ISA 2.07 defines these as no-ops */ 4999 if ((ctx->insns_flags2 & PPC2_ISA207S) && 5000 (sprn >= 808 && sprn <= 811)) { 5001 /* This is a nop */ 5002 return; 5003 } 5004 5005 /* Not defined */ 5006 qemu_log_mask(LOG_GUEST_ERROR, 5007 "Trying to write invalid spr %d (0x%03x) at " 5008 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 5009 5010 5011 /* 5012 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 5013 * generate a priv, a hv emu or a no-op 5014 */ 5015 if (sprn & 0x10) { 5016 if (ctx->pr) { 5017 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5018 } 5019 } else { 5020 if 
(ctx->pr || sprn == 0) { 5021 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5022 } 5023 } 5024 } 5025 } 5026 5027 #if defined(TARGET_PPC64) 5028 /* setb */ 5029 static void gen_setb(DisasContext *ctx) 5030 { 5031 TCGv_i32 t0 = tcg_temp_new_i32(); 5032 TCGv_i32 t8 = tcg_constant_i32(8); 5033 TCGv_i32 tm1 = tcg_constant_i32(-1); 5034 int crf = crfS(ctx->opcode); 5035 5036 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 5037 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 5038 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5039 5040 tcg_temp_free_i32(t0); 5041 } 5042 #endif 5043 5044 /*** Cache management ***/ 5045 5046 /* dcbf */ 5047 static void gen_dcbf(DisasContext *ctx) 5048 { 5049 /* XXX: specification says this is treated as a load by the MMU */ 5050 TCGv t0; 5051 gen_set_access_type(ctx, ACCESS_CACHE); 5052 t0 = tcg_temp_new(); 5053 gen_addr_reg_index(ctx, t0); 5054 gen_qemu_ld8u(ctx, t0, t0); 5055 tcg_temp_free(t0); 5056 } 5057 5058 /* dcbfep (external PID dcbf) */ 5059 static void gen_dcbfep(DisasContext *ctx) 5060 { 5061 /* XXX: specification says this is treated as a load by the MMU */ 5062 TCGv t0; 5063 CHK_SV; 5064 gen_set_access_type(ctx, ACCESS_CACHE); 5065 t0 = tcg_temp_new(); 5066 gen_addr_reg_index(ctx, t0); 5067 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5068 tcg_temp_free(t0); 5069 } 5070 5071 /* dcbi (Supervisor only) */ 5072 static void gen_dcbi(DisasContext *ctx) 5073 { 5074 #if defined(CONFIG_USER_ONLY) 5075 GEN_PRIV; 5076 #else 5077 TCGv EA, val; 5078 5079 CHK_SV; 5080 EA = tcg_temp_new(); 5081 gen_set_access_type(ctx, ACCESS_CACHE); 5082 gen_addr_reg_index(ctx, EA); 5083 val = tcg_temp_new(); 5084 /* XXX: specification says this should be treated as a store by the MMU */ 5085 gen_qemu_ld8u(ctx, val, EA); 5086 gen_qemu_st8(ctx, val, EA); 5087 tcg_temp_free(val); 5088 tcg_temp_free(EA); 5089 #endif /* defined(CONFIG_USER_ONLY) */ 5090 } 5091 5092 /* dcdst */ 5093 static void 
gen_dcbst(DisasContext *ctx) 5094 { 5095 /* XXX: specification say this is treated as a load by the MMU */ 5096 TCGv t0; 5097 gen_set_access_type(ctx, ACCESS_CACHE); 5098 t0 = tcg_temp_new(); 5099 gen_addr_reg_index(ctx, t0); 5100 gen_qemu_ld8u(ctx, t0, t0); 5101 tcg_temp_free(t0); 5102 } 5103 5104 /* dcbstep (dcbstep External PID version) */ 5105 static void gen_dcbstep(DisasContext *ctx) 5106 { 5107 /* XXX: specification say this is treated as a load by the MMU */ 5108 TCGv t0; 5109 gen_set_access_type(ctx, ACCESS_CACHE); 5110 t0 = tcg_temp_new(); 5111 gen_addr_reg_index(ctx, t0); 5112 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5113 tcg_temp_free(t0); 5114 } 5115 5116 /* dcbt */ 5117 static void gen_dcbt(DisasContext *ctx) 5118 { 5119 /* 5120 * interpreted as no-op 5121 * XXX: specification say this is treated as a load by the MMU but 5122 * does not generate any exception 5123 */ 5124 } 5125 5126 /* dcbtep */ 5127 static void gen_dcbtep(DisasContext *ctx) 5128 { 5129 /* 5130 * interpreted as no-op 5131 * XXX: specification say this is treated as a load by the MMU but 5132 * does not generate any exception 5133 */ 5134 } 5135 5136 /* dcbtst */ 5137 static void gen_dcbtst(DisasContext *ctx) 5138 { 5139 /* 5140 * interpreted as no-op 5141 * XXX: specification say this is treated as a load by the MMU but 5142 * does not generate any exception 5143 */ 5144 } 5145 5146 /* dcbtstep */ 5147 static void gen_dcbtstep(DisasContext *ctx) 5148 { 5149 /* 5150 * interpreted as no-op 5151 * XXX: specification say this is treated as a load by the MMU but 5152 * does not generate any exception 5153 */ 5154 } 5155 5156 /* dcbtls */ 5157 static void gen_dcbtls(DisasContext *ctx) 5158 { 5159 /* Always fails locking the cache */ 5160 TCGv t0 = tcg_temp_new(); 5161 gen_load_spr(t0, SPR_Exxx_L1CSR0); 5162 tcg_gen_ori_tl(t0, t0, L1CSR0_CUL); 5163 gen_store_spr(SPR_Exxx_L1CSR0, t0); 5164 tcg_temp_free(t0); 5165 } 5166 5167 /* dcbz */ 5168 static void 
gen_dcbz(DisasContext *ctx) 5169 { 5170 TCGv tcgv_addr; 5171 TCGv_i32 tcgv_op; 5172 5173 gen_set_access_type(ctx, ACCESS_CACHE); 5174 tcgv_addr = tcg_temp_new(); 5175 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5176 gen_addr_reg_index(ctx, tcgv_addr); 5177 gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op); 5178 tcg_temp_free(tcgv_addr); 5179 tcg_temp_free_i32(tcgv_op); 5180 } 5181 5182 /* dcbzep */ 5183 static void gen_dcbzep(DisasContext *ctx) 5184 { 5185 TCGv tcgv_addr; 5186 TCGv_i32 tcgv_op; 5187 5188 gen_set_access_type(ctx, ACCESS_CACHE); 5189 tcgv_addr = tcg_temp_new(); 5190 tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000); 5191 gen_addr_reg_index(ctx, tcgv_addr); 5192 gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op); 5193 tcg_temp_free(tcgv_addr); 5194 tcg_temp_free_i32(tcgv_op); 5195 } 5196 5197 /* dst / dstt */ 5198 static void gen_dst(DisasContext *ctx) 5199 { 5200 if (rA(ctx->opcode) == 0) { 5201 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5202 } else { 5203 /* interpreted as no-op */ 5204 } 5205 } 5206 5207 /* dstst /dststt */ 5208 static void gen_dstst(DisasContext *ctx) 5209 { 5210 if (rA(ctx->opcode) == 0) { 5211 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 5212 } else { 5213 /* interpreted as no-op */ 5214 } 5215 5216 } 5217 5218 /* dss / dssall */ 5219 static void gen_dss(DisasContext *ctx) 5220 { 5221 /* interpreted as no-op */ 5222 } 5223 5224 /* icbi */ 5225 static void gen_icbi(DisasContext *ctx) 5226 { 5227 TCGv t0; 5228 gen_set_access_type(ctx, ACCESS_CACHE); 5229 t0 = tcg_temp_new(); 5230 gen_addr_reg_index(ctx, t0); 5231 gen_helper_icbi(cpu_env, t0); 5232 tcg_temp_free(t0); 5233 } 5234 5235 /* icbiep */ 5236 static void gen_icbiep(DisasContext *ctx) 5237 { 5238 TCGv t0; 5239 gen_set_access_type(ctx, ACCESS_CACHE); 5240 t0 = tcg_temp_new(); 5241 gen_addr_reg_index(ctx, t0); 5242 gen_helper_icbiep(cpu_env, t0); 5243 tcg_temp_free(t0); 5244 } 5245 5246 /* Optional: */ 5247 /* dcba */ 5248 static void gen_dcba(DisasContext *ctx) 
5249 { 5250 /* 5251 * interpreted as no-op 5252 * XXX: specification say this is treated as a store by the MMU 5253 * but does not generate any exception 5254 */ 5255 } 5256 5257 /*** Segment register manipulation ***/ 5258 /* Supervisor only: */ 5259 5260 /* mfsr */ 5261 static void gen_mfsr(DisasContext *ctx) 5262 { 5263 #if defined(CONFIG_USER_ONLY) 5264 GEN_PRIV; 5265 #else 5266 TCGv t0; 5267 5268 CHK_SV; 5269 t0 = tcg_const_tl(SR(ctx->opcode)); 5270 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5271 tcg_temp_free(t0); 5272 #endif /* defined(CONFIG_USER_ONLY) */ 5273 } 5274 5275 /* mfsrin */ 5276 static void gen_mfsrin(DisasContext *ctx) 5277 { 5278 #if defined(CONFIG_USER_ONLY) 5279 GEN_PRIV; 5280 #else 5281 TCGv t0; 5282 5283 CHK_SV; 5284 t0 = tcg_temp_new(); 5285 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5286 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5287 tcg_temp_free(t0); 5288 #endif /* defined(CONFIG_USER_ONLY) */ 5289 } 5290 5291 /* mtsr */ 5292 static void gen_mtsr(DisasContext *ctx) 5293 { 5294 #if defined(CONFIG_USER_ONLY) 5295 GEN_PRIV; 5296 #else 5297 TCGv t0; 5298 5299 CHK_SV; 5300 t0 = tcg_const_tl(SR(ctx->opcode)); 5301 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5302 tcg_temp_free(t0); 5303 #endif /* defined(CONFIG_USER_ONLY) */ 5304 } 5305 5306 /* mtsrin */ 5307 static void gen_mtsrin(DisasContext *ctx) 5308 { 5309 #if defined(CONFIG_USER_ONLY) 5310 GEN_PRIV; 5311 #else 5312 TCGv t0; 5313 CHK_SV; 5314 5315 t0 = tcg_temp_new(); 5316 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5317 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 5318 tcg_temp_free(t0); 5319 #endif /* defined(CONFIG_USER_ONLY) */ 5320 } 5321 5322 #if defined(TARGET_PPC64) 5323 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 5324 5325 /* mfsr */ 5326 static void gen_mfsr_64b(DisasContext *ctx) 5327 { 5328 #if defined(CONFIG_USER_ONLY) 5329 GEN_PRIV; 5330 #else 5331 TCGv 
t0; 5332 5333 CHK_SV; 5334 t0 = tcg_const_tl(SR(ctx->opcode)); 5335 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5336 tcg_temp_free(t0); 5337 #endif /* defined(CONFIG_USER_ONLY) */ 5338 } 5339 5340 /* mfsrin */ 5341 static void gen_mfsrin_64b(DisasContext *ctx) 5342 { 5343 #if defined(CONFIG_USER_ONLY) 5344 GEN_PRIV; 5345 #else 5346 TCGv t0; 5347 5348 CHK_SV; 5349 t0 = tcg_temp_new(); 5350 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5351 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5352 tcg_temp_free(t0); 5353 #endif /* defined(CONFIG_USER_ONLY) */ 5354 } 5355 5356 /* mtsr */ 5357 static void gen_mtsr_64b(DisasContext *ctx) 5358 { 5359 #if defined(CONFIG_USER_ONLY) 5360 GEN_PRIV; 5361 #else 5362 TCGv t0; 5363 5364 CHK_SV; 5365 t0 = tcg_const_tl(SR(ctx->opcode)); 5366 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5367 tcg_temp_free(t0); 5368 #endif /* defined(CONFIG_USER_ONLY) */ 5369 } 5370 5371 /* mtsrin */ 5372 static void gen_mtsrin_64b(DisasContext *ctx) 5373 { 5374 #if defined(CONFIG_USER_ONLY) 5375 GEN_PRIV; 5376 #else 5377 TCGv t0; 5378 5379 CHK_SV; 5380 t0 = tcg_temp_new(); 5381 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5382 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5383 tcg_temp_free(t0); 5384 #endif /* defined(CONFIG_USER_ONLY) */ 5385 } 5386 5387 /* slbmte */ 5388 static void gen_slbmte(DisasContext *ctx) 5389 { 5390 #if defined(CONFIG_USER_ONLY) 5391 GEN_PRIV; 5392 #else 5393 CHK_SV; 5394 5395 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 5396 cpu_gpr[rS(ctx->opcode)]); 5397 #endif /* defined(CONFIG_USER_ONLY) */ 5398 } 5399 5400 static void gen_slbmfee(DisasContext *ctx) 5401 { 5402 #if defined(CONFIG_USER_ONLY) 5403 GEN_PRIV; 5404 #else 5405 CHK_SV; 5406 5407 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5408 cpu_gpr[rB(ctx->opcode)]); 5409 #endif /* defined(CONFIG_USER_ONLY) */ 5410 } 5411 5412 static void gen_slbmfev(DisasContext *ctx) 
5413 { 5414 #if defined(CONFIG_USER_ONLY) 5415 GEN_PRIV; 5416 #else 5417 CHK_SV; 5418 5419 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5420 cpu_gpr[rB(ctx->opcode)]); 5421 #endif /* defined(CONFIG_USER_ONLY) */ 5422 } 5423 5424 static void gen_slbfee_(DisasContext *ctx) 5425 { 5426 #if defined(CONFIG_USER_ONLY) 5427 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5428 #else 5429 TCGLabel *l1, *l2; 5430 5431 if (unlikely(ctx->pr)) { 5432 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5433 return; 5434 } 5435 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5436 cpu_gpr[rB(ctx->opcode)]); 5437 l1 = gen_new_label(); 5438 l2 = gen_new_label(); 5439 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5440 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 5441 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 5442 tcg_gen_br(l2); 5443 gen_set_label(l1); 5444 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 5445 gen_set_label(l2); 5446 #endif 5447 } 5448 #endif /* defined(TARGET_PPC64) */ 5449 5450 /*** Lookaside buffer management ***/ 5451 /* Optional & supervisor only: */ 5452 5453 /* tlbia */ 5454 static void gen_tlbia(DisasContext *ctx) 5455 { 5456 #if defined(CONFIG_USER_ONLY) 5457 GEN_PRIV; 5458 #else 5459 CHK_HV; 5460 5461 gen_helper_tlbia(cpu_env); 5462 #endif /* defined(CONFIG_USER_ONLY) */ 5463 } 5464 5465 /* tlbiel */ 5466 static void gen_tlbiel(DisasContext *ctx) 5467 { 5468 #if defined(CONFIG_USER_ONLY) 5469 GEN_PRIV; 5470 #else 5471 bool psr = (ctx->opcode >> 17) & 0x1; 5472 5473 if (ctx->pr || (!ctx->hv && !psr && ctx->hr)) { 5474 /* 5475 * tlbiel is privileged except when PSR=0 and HR=1, making it 5476 * hypervisor privileged. 
5477 */ 5478 GEN_PRIV; 5479 } 5480 5481 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5482 #endif /* defined(CONFIG_USER_ONLY) */ 5483 } 5484 5485 /* tlbie */ 5486 static void gen_tlbie(DisasContext *ctx) 5487 { 5488 #if defined(CONFIG_USER_ONLY) 5489 GEN_PRIV; 5490 #else 5491 bool psr = (ctx->opcode >> 17) & 0x1; 5492 TCGv_i32 t1; 5493 5494 if (ctx->pr) { 5495 /* tlbie is privileged... */ 5496 GEN_PRIV; 5497 } else if (!ctx->hv) { 5498 if (!ctx->gtse || (!psr && ctx->hr)) { 5499 /* 5500 * ... except when GTSE=0 or when PSR=0 and HR=1, making it 5501 * hypervisor privileged. 5502 */ 5503 GEN_PRIV; 5504 } 5505 } 5506 5507 if (NARROW_MODE(ctx)) { 5508 TCGv t0 = tcg_temp_new(); 5509 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 5510 gen_helper_tlbie(cpu_env, t0); 5511 tcg_temp_free(t0); 5512 } else { 5513 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5514 } 5515 t1 = tcg_temp_new_i32(); 5516 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5517 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 5518 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5519 tcg_temp_free_i32(t1); 5520 #endif /* defined(CONFIG_USER_ONLY) */ 5521 } 5522 5523 /* tlbsync */ 5524 static void gen_tlbsync(DisasContext *ctx) 5525 { 5526 #if defined(CONFIG_USER_ONLY) 5527 GEN_PRIV; 5528 #else 5529 5530 if (ctx->gtse) { 5531 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 5532 } else { 5533 CHK_HV; /* Else hypervisor privileged */ 5534 } 5535 5536 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 5537 if (ctx->insns_flags & PPC_BOOKE) { 5538 gen_check_tlb_flush(ctx, true); 5539 } 5540 #endif /* defined(CONFIG_USER_ONLY) */ 5541 } 5542 5543 #if defined(TARGET_PPC64) 5544 /* slbia */ 5545 static void gen_slbia(DisasContext *ctx) 5546 { 5547 #if defined(CONFIG_USER_ONLY) 5548 GEN_PRIV; 5549 #else 5550 uint32_t ih = (ctx->opcode >> 21) & 0x7; 5551 TCGv_i32 t0 = tcg_const_i32(ih); 5552 5553 CHK_SV; 5554 5555 
gen_helper_slbia(cpu_env, t0); 5556 tcg_temp_free_i32(t0); 5557 #endif /* defined(CONFIG_USER_ONLY) */ 5558 } 5559 5560 /* slbie */ 5561 static void gen_slbie(DisasContext *ctx) 5562 { 5563 #if defined(CONFIG_USER_ONLY) 5564 GEN_PRIV; 5565 #else 5566 CHK_SV; 5567 5568 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5569 #endif /* defined(CONFIG_USER_ONLY) */ 5570 } 5571 5572 /* slbieg */ 5573 static void gen_slbieg(DisasContext *ctx) 5574 { 5575 #if defined(CONFIG_USER_ONLY) 5576 GEN_PRIV; 5577 #else 5578 CHK_SV; 5579 5580 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5581 #endif /* defined(CONFIG_USER_ONLY) */ 5582 } 5583 5584 /* slbsync */ 5585 static void gen_slbsync(DisasContext *ctx) 5586 { 5587 #if defined(CONFIG_USER_ONLY) 5588 GEN_PRIV; 5589 #else 5590 CHK_SV; 5591 gen_check_tlb_flush(ctx, true); 5592 #endif /* defined(CONFIG_USER_ONLY) */ 5593 } 5594 5595 #endif /* defined(TARGET_PPC64) */ 5596 5597 /*** External control ***/ 5598 /* Optional: */ 5599 5600 /* eciwx */ 5601 static void gen_eciwx(DisasContext *ctx) 5602 { 5603 TCGv t0; 5604 /* Should check EAR[E] ! */ 5605 gen_set_access_type(ctx, ACCESS_EXT); 5606 t0 = tcg_temp_new(); 5607 gen_addr_reg_index(ctx, t0); 5608 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5609 DEF_MEMOP(MO_UL | MO_ALIGN)); 5610 tcg_temp_free(t0); 5611 } 5612 5613 /* ecowx */ 5614 static void gen_ecowx(DisasContext *ctx) 5615 { 5616 TCGv t0; 5617 /* Should check EAR[E] ! */ 5618 gen_set_access_type(ctx, ACCESS_EXT); 5619 t0 = tcg_temp_new(); 5620 gen_addr_reg_index(ctx, t0); 5621 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5622 DEF_MEMOP(MO_UL | MO_ALIGN)); 5623 tcg_temp_free(t0); 5624 } 5625 5626 /* PowerPC 601 specific instructions */ 5627 5628 /* abs - abs. 
*/ 5629 static void gen_abs(DisasContext *ctx) 5630 { 5631 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5632 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5633 5634 tcg_gen_abs_tl(d, a); 5635 if (unlikely(Rc(ctx->opcode) != 0)) { 5636 gen_set_Rc0(ctx, d); 5637 } 5638 } 5639 5640 /* abso - abso. */ 5641 static void gen_abso(DisasContext *ctx) 5642 { 5643 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5644 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5645 5646 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000); 5647 tcg_gen_abs_tl(d, a); 5648 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 5649 if (unlikely(Rc(ctx->opcode) != 0)) { 5650 gen_set_Rc0(ctx, d); 5651 } 5652 } 5653 5654 /* clcs */ 5655 static void gen_clcs(DisasContext *ctx) 5656 { 5657 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5658 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5659 tcg_temp_free_i32(t0); 5660 /* Rc=1 sets CR0 to an undefined state */ 5661 } 5662 5663 /* div - div. */ 5664 static void gen_div(DisasContext *ctx) 5665 { 5666 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5667 cpu_gpr[rB(ctx->opcode)]); 5668 if (unlikely(Rc(ctx->opcode) != 0)) { 5669 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5670 } 5671 } 5672 5673 /* divo - divo. */ 5674 static void gen_divo(DisasContext *ctx) 5675 { 5676 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5677 cpu_gpr[rB(ctx->opcode)]); 5678 if (unlikely(Rc(ctx->opcode) != 0)) { 5679 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5680 } 5681 } 5682 5683 /* divs - divs. */ 5684 static void gen_divs(DisasContext *ctx) 5685 { 5686 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5687 cpu_gpr[rB(ctx->opcode)]); 5688 if (unlikely(Rc(ctx->opcode) != 0)) { 5689 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5690 } 5691 } 5692 5693 /* divso - divso. 
*/ 5694 static void gen_divso(DisasContext *ctx) 5695 { 5696 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5697 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5698 if (unlikely(Rc(ctx->opcode) != 0)) { 5699 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5700 } 5701 } 5702 5703 /* doz - doz. */ 5704 static void gen_doz(DisasContext *ctx) 5705 { 5706 TCGLabel *l1 = gen_new_label(); 5707 TCGLabel *l2 = gen_new_label(); 5708 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5709 cpu_gpr[rA(ctx->opcode)], l1); 5710 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5711 cpu_gpr[rA(ctx->opcode)]); 5712 tcg_gen_br(l2); 5713 gen_set_label(l1); 5714 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5715 gen_set_label(l2); 5716 if (unlikely(Rc(ctx->opcode) != 0)) { 5717 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5718 } 5719 } 5720 5721 /* dozo - dozo. */ 5722 static void gen_dozo(DisasContext *ctx) 5723 { 5724 TCGLabel *l1 = gen_new_label(); 5725 TCGLabel *l2 = gen_new_label(); 5726 TCGv t0 = tcg_temp_new(); 5727 TCGv t1 = tcg_temp_new(); 5728 TCGv t2 = tcg_temp_new(); 5729 /* Start with XER OV disabled, the most likely case */ 5730 tcg_gen_movi_tl(cpu_ov, 0); 5731 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5732 cpu_gpr[rA(ctx->opcode)], l1); 5733 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5734 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5735 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5736 tcg_gen_andc_tl(t1, t1, t2); 5737 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5738 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5739 tcg_gen_movi_tl(cpu_ov, 1); 5740 tcg_gen_movi_tl(cpu_so, 1); 5741 tcg_gen_br(l2); 5742 gen_set_label(l1); 5743 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5744 gen_set_label(l2); 5745 tcg_temp_free(t0); 5746 tcg_temp_free(t1); 5747 tcg_temp_free(t2); 5748 if (unlikely(Rc(ctx->opcode) != 0)) { 5749 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5750 } 
5751 } 5752 5753 /* dozi */ 5754 static void gen_dozi(DisasContext *ctx) 5755 { 5756 target_long simm = SIMM(ctx->opcode); 5757 TCGLabel *l1 = gen_new_label(); 5758 TCGLabel *l2 = gen_new_label(); 5759 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5760 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5761 tcg_gen_br(l2); 5762 gen_set_label(l1); 5763 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5764 gen_set_label(l2); 5765 if (unlikely(Rc(ctx->opcode) != 0)) { 5766 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5767 } 5768 } 5769 5770 /* lscbx - lscbx. */ 5771 static void gen_lscbx(DisasContext *ctx) 5772 { 5773 TCGv t0 = tcg_temp_new(); 5774 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5775 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5776 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5777 5778 gen_addr_reg_index(ctx, t0); 5779 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5780 tcg_temp_free_i32(t1); 5781 tcg_temp_free_i32(t2); 5782 tcg_temp_free_i32(t3); 5783 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5784 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5785 if (unlikely(Rc(ctx->opcode) != 0)) { 5786 gen_set_Rc0(ctx, t0); 5787 } 5788 tcg_temp_free(t0); 5789 } 5790 5791 /* maskg - maskg. 
 */
static void gen_maskg(DisasContext *ctx)
{
    /*
     * maskg: build a mask whose range is selected by the low five bits of
     * rS (start) and rB (end).  The xor of the two shifted all-ones values
     * yields the in-range bits; when the end bit precedes the start bit
     * the result is negated to produce the wrapped mask.
     * NOTE(review): wrap handling relies on 32-bit two's-complement
     * negation of the xor-ed value -- confirm against the POWER maskg
     * definition.
     */
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    TCGv t3 = tcg_temp_new();
    tcg_gen_movi_tl(t3, 0xFFFFFFFF);
    /* t0 = end bit index (from rB), t1 = start bit index (from rS) */
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F);
    tcg_gen_addi_tl(t2, t0, 1);
    tcg_gen_shr_tl(t2, t3, t2);
    tcg_gen_shr_tl(t3, t3, t1);
    tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3);
    tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
    tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    tcg_temp_free(t3);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* maskir - maskir. */
static void gen_maskir(DisasContext *ctx)
{
    /* rA = (rS & rB) | (rA & ~rB): insert rS bits into rA under mask rB. */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* mul - mul.
 */
static void gen_mul(DisasContext *ctx)
{
    /*
     * Zero-extend rA and rB to 64 bits and multiply: the low 32 bits of
     * the product go to the MQ SPR, the high 32 bits to rD.
     */
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* mulo - mulo. */
static void gen_mulo(DisasContext *ctx)
{
    /*
     * Same as mul, but additionally sets OV/SO when the 64-bit product
     * differs from the sign-extension of its low 32 bits, i.e. the
     * signed result does not fit in 32 bits.
     */
    TCGLabel *l1 = gen_new_label();
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv t2 = tcg_temp_new();
    /* Start with XER OV disabled, the most likely case */
    tcg_gen_movi_tl(cpu_ov, 0);
    tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_mul_i64(t0, t0, t1);
    tcg_gen_trunc_i64_tl(t2, t0);
    gen_store_spr(SPR_MQ, t2);
    tcg_gen_shri_i64(t1, t0, 32);
    tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1);
    tcg_gen_ext32s_i64(t1, t0);
    tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1);
    tcg_gen_movi_tl(cpu_ov, 1);
    tcg_gen_movi_tl(cpu_so, 1);
    gen_set_label(l1);
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]);
    }
}

/* nabs - nabs. */
static void gen_nabs(DisasContext *ctx)
{
    /* rD = -|rA| (negative absolute value). */
    TCGv d = cpu_gpr[rD(ctx->opcode)];
    TCGv a = cpu_gpr[rA(ctx->opcode)];

    tcg_gen_abs_tl(d, a);
    tcg_gen_neg_tl(d, d);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, d);
    }
}

/* nabso - nabso.
 */
static void gen_nabso(DisasContext *ctx)
{
    /* rD = -|rA|; OV is unconditionally cleared (nabs cannot overflow). */
    TCGv d = cpu_gpr[rD(ctx->opcode)];
    TCGv a = cpu_gpr[rA(ctx->opcode)];

    tcg_gen_abs_tl(d, a);
    tcg_gen_neg_tl(d, d);
    /* nabs never overflows */
    tcg_gen_movi_tl(cpu_ov, 0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, d);
    }
}

/* rlmi - rlmi. */
static void gen_rlmi(DisasContext *ctx)
{
    /* Rotate rS left by rB & 0x1F and insert into rA under MASK(mb, me). */
    uint32_t mb = MB(ctx->opcode);
    uint32_t me = ME(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_andi_tl(t0, t0, MASK(mb, me));
    tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                    ~MASK(mb, me));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0);
    tcg_temp_free(t0);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* rrib - rrib. */
static void gen_rrib(DisasContext *ctx)
{
    /*
     * Rotate right and insert bit, with n = rB & 0x1F:
     * rA = ((rS >> n) & (0x80000000 >> n)) | (rA & ~(0x80000000 >> n))
     */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0x80000000);
    tcg_gen_shr_tl(t1, t1, t0);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    tcg_gen_and_tl(t0, t0, t1);
    tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sle - sle.
 */
static void gen_sle(DisasContext *ctx)
{
    /* rA = rS << n (n = rB & 0x1F); MQ = rS rotated left by n. */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sleq - sleq. */
static void gen_sleq(DisasContext *ctx)
{
    /*
     * rot = rS rotated left by n; mask = 0xFFFFFFFF << n;
     * rA = (rot & mask) | (old MQ & ~mask); MQ = rot.
     */
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t2, 0xFFFFFFFF);
    tcg_gen_shl_tl(t2, t2, t0);
    tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_and_tl(t0, t0, t2);
    tcg_gen_andc_tl(t1, t1, t2);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sliq - sliq. */
static void gen_sliq(DisasContext *ctx)
{
    /* Immediate form of sle: the shift/rotate count is the SH field. */
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* slliq - slliq.
 */
static void gen_slliq(DisasContext *ctx)
{
    /*
     * rot = rS rotated left by SH;
     * rA = (rot & (ones << SH)) | (old MQ & ~(ones << SH)); MQ = rot.
     */
    int sh = SH(ctx->opcode);
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    gen_load_spr(t1, SPR_MQ);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh));
    tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh));
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sllq - sllq. */
static void gen_sllq(DisasContext *ctx)
{
    /*
     * mask = 0xFFFFFFFF << (rB & 0x1F).  When rB & 0x20 is set the
     * effective shift is >= 32 and rA = MQ & mask; otherwise
     * rA = (rS << n) | (MQ & ~mask).
     */
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_movi_tl(t1, 0xFFFFFFFF);
    tcg_gen_shl_tl(t1, t1, t2);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_spr(t0, SPR_MQ);
    tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    tcg_gen_br(l2);
    gen_set_label(l1);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    gen_load_spr(t2, SPR_MQ);
    tcg_gen_andc_tl(t1, t2, t1);
    tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1);
    gen_set_label(l2);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* slq - slq.
 */
static void gen_slq(DisasContext *ctx)
{
    /* rA = rS << n, forced to 0 when rB & 0x20 is set; MQ = rotate. */
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    /* Effective shift >= 32 (bit 0x20 of rB set): result is zero. */
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sraiq - sraiq. */
static void gen_sraiq(DisasContext *ctx)
{
    /*
     * rA = rS >> sh (arithmetic); MQ = rS rotated right by sh.
     * CA is set when rS is negative and nonzero bits were shifted out.
     */
    int sh = SH(ctx->opcode);
    TCGLabel *l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh);
    tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh);
    tcg_gen_or_tl(t0, t0, t1);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_movi_tl(cpu_ca, 0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1);
    tcg_gen_movi_tl(cpu_ca, 1);
    gen_set_label(l1);
    tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sraq - sraq.
*/ 6106 static void gen_sraq(DisasContext *ctx) 6107 { 6108 TCGLabel *l1 = gen_new_label(); 6109 TCGLabel *l2 = gen_new_label(); 6110 TCGv t0 = tcg_temp_new(); 6111 TCGv t1 = tcg_temp_local_new(); 6112 TCGv t2 = tcg_temp_local_new(); 6113 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6114 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6115 tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2); 6116 tcg_gen_subfi_tl(t2, 32, t2); 6117 tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2); 6118 tcg_gen_or_tl(t0, t0, t2); 6119 gen_store_spr(SPR_MQ, t0); 6120 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6121 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l1); 6122 tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]); 6123 tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31); 6124 gen_set_label(l1); 6125 tcg_temp_free(t0); 6126 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1); 6127 tcg_gen_movi_tl(cpu_ca, 0); 6128 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 6129 tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2); 6130 tcg_gen_movi_tl(cpu_ca, 1); 6131 gen_set_label(l2); 6132 tcg_temp_free(t1); 6133 tcg_temp_free(t2); 6134 if (unlikely(Rc(ctx->opcode) != 0)) { 6135 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6136 } 6137 } 6138 6139 /* sre - sre. */ 6140 static void gen_sre(DisasContext *ctx) 6141 { 6142 TCGv t0 = tcg_temp_new(); 6143 TCGv t1 = tcg_temp_new(); 6144 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6145 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6146 tcg_gen_subfi_tl(t1, 32, t1); 6147 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6148 tcg_gen_or_tl(t1, t0, t1); 6149 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6150 gen_store_spr(SPR_MQ, t1); 6151 tcg_temp_free(t0); 6152 tcg_temp_free(t1); 6153 if (unlikely(Rc(ctx->opcode) != 0)) { 6154 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6155 } 6156 } 6157 6158 /* srea - srea. 
*/ 6159 static void gen_srea(DisasContext *ctx) 6160 { 6161 TCGv t0 = tcg_temp_new(); 6162 TCGv t1 = tcg_temp_new(); 6163 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6164 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6165 gen_store_spr(SPR_MQ, t0); 6166 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 6167 tcg_temp_free(t0); 6168 tcg_temp_free(t1); 6169 if (unlikely(Rc(ctx->opcode) != 0)) { 6170 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6171 } 6172 } 6173 6174 /* sreq */ 6175 static void gen_sreq(DisasContext *ctx) 6176 { 6177 TCGv t0 = tcg_temp_new(); 6178 TCGv t1 = tcg_temp_new(); 6179 TCGv t2 = tcg_temp_new(); 6180 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6181 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6182 tcg_gen_shr_tl(t1, t1, t0); 6183 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6184 gen_load_spr(t2, SPR_MQ); 6185 gen_store_spr(SPR_MQ, t0); 6186 tcg_gen_and_tl(t0, t0, t1); 6187 tcg_gen_andc_tl(t2, t2, t1); 6188 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6189 tcg_temp_free(t0); 6190 tcg_temp_free(t1); 6191 tcg_temp_free(t2); 6192 if (unlikely(Rc(ctx->opcode) != 0)) { 6193 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6194 } 6195 } 6196 6197 /* sriq */ 6198 static void gen_sriq(DisasContext *ctx) 6199 { 6200 int sh = SH(ctx->opcode); 6201 TCGv t0 = tcg_temp_new(); 6202 TCGv t1 = tcg_temp_new(); 6203 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6204 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6205 tcg_gen_or_tl(t1, t0, t1); 6206 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6207 gen_store_spr(SPR_MQ, t1); 6208 tcg_temp_free(t0); 6209 tcg_temp_free(t1); 6210 if (unlikely(Rc(ctx->opcode) != 0)) { 6211 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6212 } 6213 } 6214 6215 /* srliq */ 6216 static void gen_srliq(DisasContext *ctx) 6217 { 6218 int sh = SH(ctx->opcode); 6219 TCGv t0 = tcg_temp_new(); 6220 TCGv t1 = tcg_temp_new(); 6221 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6222 
gen_load_spr(t1, SPR_MQ); 6223 gen_store_spr(SPR_MQ, t0); 6224 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 6225 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 6226 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6227 tcg_temp_free(t0); 6228 tcg_temp_free(t1); 6229 if (unlikely(Rc(ctx->opcode) != 0)) { 6230 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6231 } 6232 } 6233 6234 /* srlq */ 6235 static void gen_srlq(DisasContext *ctx) 6236 { 6237 TCGLabel *l1 = gen_new_label(); 6238 TCGLabel *l2 = gen_new_label(); 6239 TCGv t0 = tcg_temp_local_new(); 6240 TCGv t1 = tcg_temp_local_new(); 6241 TCGv t2 = tcg_temp_local_new(); 6242 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6243 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6244 tcg_gen_shr_tl(t2, t1, t2); 6245 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6246 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6247 gen_load_spr(t0, SPR_MQ); 6248 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6249 tcg_gen_br(l2); 6250 gen_set_label(l1); 6251 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6252 tcg_gen_and_tl(t0, t0, t2); 6253 gen_load_spr(t1, SPR_MQ); 6254 tcg_gen_andc_tl(t1, t1, t2); 6255 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6256 gen_set_label(l2); 6257 tcg_temp_free(t0); 6258 tcg_temp_free(t1); 6259 tcg_temp_free(t2); 6260 if (unlikely(Rc(ctx->opcode) != 0)) { 6261 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6262 } 6263 } 6264 6265 /* srq */ 6266 static void gen_srq(DisasContext *ctx) 6267 { 6268 TCGLabel *l1 = gen_new_label(); 6269 TCGv t0 = tcg_temp_new(); 6270 TCGv t1 = tcg_temp_new(); 6271 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6272 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6273 tcg_gen_subfi_tl(t1, 32, t1); 6274 tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6275 tcg_gen_or_tl(t1, t0, t1); 6276 gen_store_spr(SPR_MQ, t1); 6277 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6278 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6279 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 
6280 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6281 gen_set_label(l1); 6282 tcg_temp_free(t0); 6283 tcg_temp_free(t1); 6284 if (unlikely(Rc(ctx->opcode) != 0)) { 6285 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6286 } 6287 } 6288 6289 /* PowerPC 602 specific instructions */ 6290 6291 /* dsa */ 6292 static void gen_dsa(DisasContext *ctx) 6293 { 6294 /* XXX: TODO */ 6295 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6296 } 6297 6298 /* esa */ 6299 static void gen_esa(DisasContext *ctx) 6300 { 6301 /* XXX: TODO */ 6302 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6303 } 6304 6305 /* mfrom */ 6306 static void gen_mfrom(DisasContext *ctx) 6307 { 6308 #if defined(CONFIG_USER_ONLY) 6309 GEN_PRIV; 6310 #else 6311 CHK_SV; 6312 gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 6313 #endif /* defined(CONFIG_USER_ONLY) */ 6314 } 6315 6316 /* 602 - 603 - G2 TLB management */ 6317 6318 /* tlbld */ 6319 static void gen_tlbld_6xx(DisasContext *ctx) 6320 { 6321 #if defined(CONFIG_USER_ONLY) 6322 GEN_PRIV; 6323 #else 6324 CHK_SV; 6325 gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6326 #endif /* defined(CONFIG_USER_ONLY) */ 6327 } 6328 6329 /* tlbli */ 6330 static void gen_tlbli_6xx(DisasContext *ctx) 6331 { 6332 #if defined(CONFIG_USER_ONLY) 6333 GEN_PRIV; 6334 #else 6335 CHK_SV; 6336 gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6337 #endif /* defined(CONFIG_USER_ONLY) */ 6338 } 6339 6340 /* POWER instructions not in PowerPC 601 */ 6341 6342 /* clf */ 6343 static void gen_clf(DisasContext *ctx) 6344 { 6345 /* Cache line flush: implemented as no-op */ 6346 } 6347 6348 /* cli */ 6349 static void gen_cli(DisasContext *ctx) 6350 { 6351 #if defined(CONFIG_USER_ONLY) 6352 GEN_PRIV; 6353 #else 6354 /* Cache line invalidate: privileged and treated as no-op */ 6355 CHK_SV; 6356 #endif /* defined(CONFIG_USER_ONLY) */ 6357 } 6358 6359 /* dclst */ 6360 static void gen_dclst(DisasContext *ctx) 6361 { 6362 /* Data cache line store: 
treated as no-op */
}

/* mfsri: load a segment register selected by the EA's top nibble. */
static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /* Segment register index = EA bits 28..31 (top nibble of a 32-bit EA). */
    tcg_gen_extract_tl(t0, t0, 28, 4);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    /* Mirror the result into rA when it is a distinct, nonzero register. */
    if (ra != 0 && ra != rd) {
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rac: the helper translates the computed EA to a real address in rD. */
static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* rfsvc: helper restores CPU state, so end the translation block. */
static void gen_rfsvc(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_rfsvc(cpu_env);
    /* CPU state changed behind the translator's back: stop this TB. */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* svc is not implemented for now */

/* BookE specific instructions */

/* XXX: not implemented on 440 ? */
static void gen_mfapidi(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* XXX: not implemented on 440 ?
 */
static void gen_tlbiva(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    /*
     * NOTE(review): the computed effective address in t0 is unused; the
     * helper is handed rB directly.  Confirm this is the intended
     * operand for tlbiva before changing.
     */
    gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* All 405 MAC instructions are translated here */
/*
 * opc2 selects the operation family as used below: bit 0x04 set means
 * multiply-and-accumulate, additionally bit 0x02 set negates the
 * accumulate; otherwise a plain multiply is generated.
 * opc3 (per the cases and tests below): the switch on opc3 & 0x0D picks
 * which 16-bit halves of rA/rB are used and their extension; bit 0x01 =
 * signed, bit 0x02 = saturate, bit 0x10 = record overflow in XER OV/SO.
 */
static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3,
                                        int ra, int rb, int rt, int Rc)
{
    TCGv t0, t1;

    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();

    switch (opc3 & 0x0D) {
    case 0x05:
        /* macchw - macchw. - macchwo - macchwo. */
        /* macchws - macchws. - macchwso - macchwso. */
        /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */
        /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */
        /* mulchw - mulchw. */
        /* Signed: low half of rA times high half of rB. */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x04:
        /* macchwu - macchwu. - macchwuo - macchwuo. */
        /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */
        /* mulchwu - mulchwu. */
        /* Unsigned: low half of rA times high half of rB. */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x01:
        /* machhw - machhw. - machhwo - machhwo. */
        /* machhws - machhws. - machhwso - machhwso. */
        /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */
        /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */
        /* mulhhw - mulhhw. */
        /* Signed: high halves of both operands. */
        tcg_gen_sari_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16s_tl(t0, t0);
        tcg_gen_sari_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16s_tl(t1, t1);
        break;
    case 0x00:
        /* machhwu - machhwu. - machhwuo - machhwuo. */
        /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */
        /* mulhhwu - mulhhwu. */
        /* Unsigned: high halves of both operands. */
        tcg_gen_shri_tl(t0, cpu_gpr[ra], 16);
        tcg_gen_ext16u_tl(t0, t0);
        tcg_gen_shri_tl(t1, cpu_gpr[rb], 16);
        tcg_gen_ext16u_tl(t1, t1);
        break;
    case 0x0D:
        /* maclhw - maclhw. - maclhwo - maclhwo. */
        /* maclhws - maclhws. - maclhwso - maclhwso. */
        /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */
        /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */
        /* mullhw - mullhw. */
        /* Signed: low halves of both operands. */
        tcg_gen_ext16s_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16s_tl(t1, cpu_gpr[rb]);
        break;
    case 0x0C:
        /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */
        /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */
        /* mullhwu - mullhwu. */
        /* Unsigned: low halves of both operands. */
        tcg_gen_ext16u_tl(t0, cpu_gpr[ra]);
        tcg_gen_ext16u_tl(t1, cpu_gpr[rb]);
        break;
    }
    if (opc2 & 0x04) {
        /* (n)multiply-and-accumulate (0x0C / 0x0E) */
        tcg_gen_mul_tl(t1, t0, t1);
        if (opc2 & 0x02) {
            /* nmultiply-and-accumulate (0x0E) */
            tcg_gen_sub_tl(t0, cpu_gpr[rt], t1);
        } else {
            /* multiply-and-accumulate (0x0C) */
            tcg_gen_add_tl(t0, cpu_gpr[rt], t1);
        }

        if (opc3 & 0x12) {
            /* Check overflow and/or saturate */
            TCGLabel *l1 = gen_new_label();

            if (opc3 & 0x10) {
                /* Start with XER OV disabled, the most likely case */
                tcg_gen_movi_tl(cpu_ov, 0);
            }
            if (opc3 & 0x01) {
                /* Signed */
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t1);
                tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
                tcg_gen_xor_tl(t1, cpu_gpr[rt], t0);
                tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_sari_tl(t0, cpu_gpr[rt], 31);
                    tcg_gen_xori_tl(t0, t0, 0x7fffffff);
                }
            } else {
                /* Unsigned */
                tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
                if (opc3 & 0x02) {
                    /* Saturate */
                    tcg_gen_movi_tl(t0, UINT32_MAX);
                }
            }
            if (opc3 & 0x10) {
                /* Check overflow */
                tcg_gen_movi_tl(cpu_ov, 1);
                tcg_gen_movi_tl(cpu_so, 1);
            }
            gen_set_label(l1);
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        }
    } else {
        tcg_gen_mul_tl(cpu_gpr[rt], t0, t1);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    /*
     * NOTE(review): `unlikely(Rc) != 0` is unusually parenthesized;
     * presumably harmless if unlikely() yields its argument's truth
     * value, but verify against the unlikely() definition.
     */
    if (unlikely(Rc) != 0) {
        /* Update Rc0 */
        gen_set_Rc0(ctx, cpu_gpr[rt]);
    }
}

/* Instantiate one translator per 405 MAC opcode variant. */
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
static void glue(gen_, name)(DisasContext *ctx)                               \
{                                                                             \
    gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode),   \
                         rD(ctx->opcode), Rc(ctx->opcode));                   \
}

/* macchw - macchw. */
GEN_MAC_HANDLER(macchw, 0x0C, 0x05);
/* macchwo - macchwo. */
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15);
/* macchws - macchws. */
GEN_MAC_HANDLER(macchws, 0x0C, 0x07);
/* macchwso - macchwso. */
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17);
/* macchwsu - macchwsu. */
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06);
/* macchwsuo - macchwsuo. */
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16);
/* macchwu - macchwu. */
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04);
/* macchwuo - macchwuo. */
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14);
/* machhw - machhw. */
GEN_MAC_HANDLER(machhw, 0x0C, 0x01);
/* machhwo - machhwo. */
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11);
/* machhws - machhws. */
GEN_MAC_HANDLER(machhws, 0x0C, 0x03);
/* machhwso - machhwso. */
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13);
/* machhwsu - machhwsu. */
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02);
/* machhwsuo - machhwsuo. */
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12);
/* machhwu - machhwu. */
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00);
/* machhwuo - machhwuo. */
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10);
/* maclhw - maclhw. */
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D);
/* maclhwo - maclhwo. */
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D);
/* maclhws - maclhws. */
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F);
/* maclhwso - maclhwso.
 */
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F);
/* maclhwu - maclhwu. */
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C);
/* maclhwuo - maclhwuo. */
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C);
/* maclhwsu - maclhwsu. */
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E);
/* maclhwsuo - maclhwsuo. */
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E);
/* Negative multiply-and-accumulate variants (opc2 = 0x0E). */
/* nmacchw - nmacchw. */
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05);
/* nmacchwo - nmacchwo. */
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15);
/* nmacchws - nmacchws. */
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07);
/* nmacchwso - nmacchwso. */
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17);
/* nmachhw - nmachhw. */
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01);
/* nmachhwo - nmachhwo. */
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11);
/* nmachhws - nmachhws. */
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03);
/* nmachhwso - nmachhwso. */
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13);
/* nmaclhw - nmaclhw. */
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D);
/* nmaclhwo - nmaclhwo. */
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D);
/* nmaclhws - nmaclhws. */
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F);
/* nmaclhwso - nmaclhwso. */
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F);

/* Multiply-only variants (opc2 = 0x08). */
/* mulchw - mulchw. */
GEN_MAC_HANDLER(mulchw, 0x08, 0x05);
/* mulchwu - mulchwu. */
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04);
/* mulhhw - mulhhw. */
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01);
/* mulhhwu - mulhhwu. */
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00);
/* mullhw - mullhw. */
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D);
/* mullhwu - mullhwu.
 */
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C);

/* mfdcr: move from device control register (number from SPR field). */
static void gen_mfdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcr: move to device control register (number from SPR field). */
static void gen_mtdcr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv dcrn;

    CHK_SV;
    dcrn = tcg_const_tl(SPR(ctx->opcode));
    gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]);
    tcg_temp_free(dcrn);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrx */
/* XXX: not implemented on 440 ? */
/* Indexed form: the DCR number is taken from rA at run time. */
static void gen_mfdcrx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mtdcrx */
/* XXX: not implemented on 440 ?
 */
static void gen_mtdcrx(DisasContext *ctx)
{
    /* Indexed store to a DCR whose number is taken from rA at run time. */
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
#endif /* defined(CONFIG_USER_ONLY) */
}

/* mfdcrux (PPC 460) : user-mode access to DCR */
static void gen_mfdcrux(DisasContext *ctx)
{
    gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env,
                        cpu_gpr[rA(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* mtdcrux (PPC 460) : user-mode access to DCR */
static void gen_mtdcrux(DisasContext *ctx)
{
    gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)],
                         cpu_gpr[rS(ctx->opcode)]);
    /* Note: Rc update flag set leads to undefined state of Rc0 */
}

/* dccci */
static void gen_dccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* dcread: perform the load for its MMU side effects, return the EA in rD. */
static void gen_dcread(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv EA, val;

    CHK_SV;
    gen_set_access_type(ctx, ACCESS_CACHE);
    EA = tcg_temp_new();
    gen_addr_reg_index(ctx, EA);
    val = tcg_temp_new();
    /* Loaded value is discarded; only the access itself matters. */
    gen_qemu_ld32u(ctx, val, EA);
    tcg_temp_free(val);
    tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA);
    tcg_temp_free(EA);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* icbt */
static void gen_icbt_40x(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     * does not generate any exception
     */
}

/* iccci */
static void gen_iccci(DisasContext *ctx)
{
    CHK_SV;
    /* interpreted as no-op */
}

/* icread */
static void gen_icread(DisasContext *ctx)
{
    CHK_SV;
    /*
interpreted as no-op */
}

/* rfci (supervisor only) */
static void gen_rfci_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_40x_rfci(cpu_env);
    /* CPU state changed behind the translator's back: stop this TB. */
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfci(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* BookE specific */

/* XXX: not implemented on 440 ? */
static void gen_rfdi(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfdi(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* XXX: not implemented on 440 ? */
static void gen_rfmci(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* Restore CPU state */
    gen_helper_rfmci(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 405 implementation */

/* tlbre */
static void gen_tlbre_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    /* rB selects which word of the TLB entry is read (0 = hi, 1 = lo). */
    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                cpu_gpr[rA(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
*/
/*
 * tlbsx - TLB search (40x).  The helper returns the matching TLB index
 * in rD, or -1 on no match.  With Rc set, CR0 is built from SO and the
 * EQ bit (0x02) is set only when a match was found (rD != -1).
 */
static void gen_tlbsx_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/*
 * tlbwe - TLB write entry (40x).
 * rB selects the entry half: 0 = high word, 1 = low word; otherwise invalid.
 */
static void gen_tlbwe_40x(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
        gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    case 1:
        gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)],
                                cpu_gpr[rS(ctx->opcode)]);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC 440 implementation */

/*
 * tlbre - TLB read entry (440).
 * rB selects one of three entry words (0..2); anything else is invalid.
 */
static void gen_tlbre_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env,
                                 t0, cpu_gpr[rA(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
*/
/*
 * tlbsx - TLB search (440).  Same CR0 protocol as the 40x variant:
 * rD gets the matching index or -1; with Rc set, CR0 = SO plus EQ (0x02)
 * when a match was found.
 */
static void gen_tlbsx_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
    if (Rc(ctx->opcode)) {
        TCGLabel *l1 = gen_new_label();
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02);
        gen_set_label(l1);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/*
 * tlbwe - TLB write entry (440).
 * rB selects one of three entry words (0..2); anything else is invalid.
 */
static void gen_tlbwe_440(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    switch (rB(ctx->opcode)) {
    case 0:
    case 1:
    case 2:
        {
            TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode));
            gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)],
                                 cpu_gpr[rS(ctx->opcode)]);
            tcg_temp_free_i32(t0);
        }
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* TLB management - PowerPC BookE 2.06 implementation */

/* tlbre - the helper reads env state directly; no GPR operands */
static void gen_tlbre_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbre(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbsx - tlbsx.
*/
/*
 * tlbsx - TLB search (BookE 2.06).
 * EA = (rA ? GPR[rA] : 0) + GPR[rB]; note the base comes from rD's field
 * here (the encoding aliases it), matching the existing behaviour.
 */
static void gen_tlbsx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    if (rA(ctx->opcode)) {
        t0 = tcg_temp_new();
        tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]);
    } else {
        t0 = tcg_const_tl(0);
    }

    tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]);
    gen_helper_booke206_tlbsx(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbwe - the helper writes env state directly; no GPR operands */
static void gen_tlbwe_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_booke206_tlbwe(cpu_env);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbivax - TLB invalidate virtual address, indexed */
static void gen_tlbivax_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_booke206_tlbivax(cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

/*
 * tlbilx - TLB invalidate local.  Bits 21-22 of the opcode select the
 * invalidation class (0, 1 or 3); class 2 is reserved and raises an
 * invalid-instruction exception.
 */
static void gen_tlbilx_booke206(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);

    switch ((ctx->opcode >> 21) & 0x3) {
    case 0:
        gen_helper_booke206_tlbilx0(cpu_env, t0);
        break;
    case 1:
        gen_helper_booke206_tlbilx1(cpu_env, t0);
        break;
    case 3:
        gen_helper_booke206_tlbilx3(cpu_env, t0);
        break;
    default:
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
        break;
    }

    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}


/* wrtee - copy the EE bit from rD into MSR[EE] */
static void gen_wrtee(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE));
    tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    tcg_gen_or_tl(cpu_msr, cpu_msr, t0);
    tcg_temp_free(t0);
    /*
     * Stop translation to have a chance to raise an exception if we
     * just set msr_ee to 1
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* wrteei - set or clear MSR[EE] from an immediate bit in the opcode */
static void gen_wrteei(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    if (ctx->opcode & 0x00008000) {
        tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE));
        /* Stop translation to have a chance to raise an exception */
        ctx->base.is_jmp = DISAS_EXIT_UPDATE;
    } else {
        /* Clearing EE cannot unmask an interrupt; no need to end the TB */
        tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE));
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* PowerPC 440 specific instructions */

/* dlmzb - determine leftmost zero byte; Rc flag is passed to the helper */
static void gen_dlmzb(DisasContext *ctx)
{
    TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode));
    gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env,
                     cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0);
    tcg_temp_free_i32(t0);
}

/* mbar replaces eieio on 440 */
static void gen_mbar(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* msync replaces sync on 440 */
static void gen_msync_4xx(DisasContext *ctx)
{
    /* Only e500 seems to treat reserved bits as invalid */
    if ((ctx->insns_flags2 & PPC2_BOOKE206) &&
        (ctx->opcode & 0x03FFF801)) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    }
    /* otherwise interpreted as no-op */
}

/* icbt */
static void gen_icbt_440(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification say this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* Embedded.Processor Control */

/* msgclr - clear a pending doorbell; book3s 2.x uses a dedicated helper */
static void gen_msgclr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
    if (is_book3s_arch2x(ctx)) {
        gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

/* msgsnd - send a doorbell; book3s 2.x uses a dedicated helper */
static void gen_msgsnd(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
    if (is_book3s_arch2x(ctx)) {
        gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    } else {
        gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

#if defined(TARGET_PPC64)
/* msgclrp - clear a directed privileged doorbell (supervisor, not HV) */
static void gen_msgclrp(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* msgsndp - send a directed privileged doorbell (supervisor, not HV) */
static void gen_msgsndp(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}
#endif

/* msgsync - only the privilege check is emitted; otherwise a no-op */
static void gen_msgsync(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_HV;
#endif /* defined(CONFIG_USER_ONLY) */
    /* interpreted as no-op */
}

#if defined(TARGET_PPC64)
/* maddld - multiply-add low doubleword: rD = low64(rA * rB) + rC */
static void gen_maddld(DisasContext *ctx)
{
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mul_i64(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]);
    tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]);
    tcg_temp_free_i64(t1);
}

/*
 * maddhd maddhdu - multiply-add high doubleword.
 * The Rc bit of the encoding distinguishes the unsigned form (maddhdu)
 * from the signed one; t1 carries the sign-extension of the addend for
 * the 128-bit add, or zero in the unsigned case.
 */
static void gen_maddhd_maddhdu(DisasContext *ctx)
{
    TCGv_i64 lo = tcg_temp_new_i64();
    TCGv_i64 hi = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    if (Rc(ctx->opcode)) {
        tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_movi_i64(t1, 0);
    } else {
        tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)],
                          cpu_gpr[rB(ctx->opcode)]);
        tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63);
    }
    tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi,
                     cpu_gpr[rC(ctx->opcode)], t1);
    tcg_temp_free_i64(lo);
    tcg_temp_free_i64(hi);
    tcg_temp_free_i64(t1);
}
#endif /* defined(TARGET_PPC64) */

/* tbegin - transactional memory begin; the helper records the failure */
static void gen_tbegin(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    gen_helper_tbegin(cpu_env);
}

#define GEN_TM_NOOP(name)                                      \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails in QEMU, these user         \
     * space instructions all have a simple implementation:    \
     *                                                         \
     *     CR[0] = 0b0 || MSR[TS] || 0b0                       \
     *           = 0b0 || 0b00    || 0b0                       \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

GEN_TM_NOOP(tend);
GEN_TM_NOOP(tabort);
GEN_TM_NOOP(tabortwc);
GEN_TM_NOOP(tabortwci);
GEN_TM_NOOP(tabortdc);
GEN_TM_NOOP(tabortdci);
GEN_TM_NOOP(tsr);

/* cp_abort - copy-paste abort; nothing to undo since copy/paste fail */
static inline void gen_cp_abort(DisasContext *ctx)
{
    /* Do Nothing */
}

#define GEN_CP_PASTE_NOOP(name)                           \
static inline void gen_##name(DisasContext *ctx)          \
{                                                         \
    /*                                                    \
     * Generate invalid exception until we have an        \
     * implementation of the copy paste facility          \
     */                                                   \
    gen_invalid(ctx);                                     \
}

GEN_CP_PASTE_NOOP(copy)
GEN_CP_PASTE_NOOP(paste)

/* tcheck - transaction check; always reports doomed (see comment) */
static void gen_tcheck(DisasContext *ctx)
{
    if (unlikely(!ctx->tm_enabled)) {
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);
        return;
    }
    /*
     * Because tbegin always fails, the tcheck implementation is
     * simple:
     *
     * CR[CRF] = TDOOMED || MSR[TS] || 0b0
     *         = 0b1 || 0b00 || 0b0
     */
    tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8);
}

#if defined(CONFIG_USER_ONLY)
#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC);            \
}

#else

#define GEN_TM_PRIV_NOOP(name)                                 \
static inline void gen_##name(DisasContext *ctx)               \
{                                                              \
    CHK_SV;                                                    \
    if (unlikely(!ctx->tm_enabled)) {                          \
        gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM);   \
        return;                                                \
    }                                                          \
    /*                                                         \
     * Because tbegin always fails, the implementation is      \
     * simple:                                                 \
     *                                                         \
     *   CR[0] = 0b0 || MSR[TS] || 0b0                         \
     *         = 0b0 || 0b00    || 0b0                         \
     */                                                        \
    tcg_gen_movi_i32(cpu_crf[0], 0);                           \
}

#endif

GEN_TM_PRIV_NOOP(treclaim);
GEN_TM_PRIV_NOOP(trechkpt);

/* Read FP register 'regno' from env into 'dst' */
static inline void get_fpr(TCGv_i64 dst, int regno)
{
    tcg_gen_ld_i64(dst, cpu_env, fpr_offset(regno));
}

/* Write 'src' into FP register 'regno' in env */
static inline void set_fpr(int regno, TCGv_i64 src)
{
    tcg_gen_st_i64(src, cpu_env, fpr_offset(regno));
}

/* Read one 64-bit half of Altivec register 'regno' ('high' selects which) */
static inline void get_avr64(TCGv_i64 dst, int regno, bool high)
{
    tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high));
}

/* Write one 64-bit half of Altivec register 'regno' */
static inline void set_avr64(int regno, TCGv_i64 src, bool high)
{
    tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high));
}

/*
 * Helpers for decodetree used by !function for decoding arguments.
 */
static int times_2(DisasContext *ctx, int x)
{
    return x * 2;
}

static int times_4(DisasContext *ctx, int x)
{
    return x * 4;
}

static int times_16(DisasContext *ctx, int x)
{
    return x * 16;
}

/*
 * Helpers for trans_* functions to check for specific insns flags.
 * Use token pasting to ensure that we use the proper flag with the
 * proper variable.
 */
#define REQUIRE_INSNS_FLAGS(CTX, NAME) \
    do {                                                \
        if (((CTX)->insns_flags & PPC_##NAME) == 0) {   \
            return false;                               \
        }                                               \
    } while (0)

#define REQUIRE_INSNS_FLAGS2(CTX, NAME) \
    do {                                                 \
        if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) {  \
            return false;                                \
        }                                                \
    } while (0)

/* Then special-case the check for 64-bit so that we elide code for ppc32. */
#if TARGET_LONG_BITS == 32
# define REQUIRE_64BIT(CTX)  return false
#else
# define REQUIRE_64BIT(CTX)  REQUIRE_INSNS_FLAGS(CTX, 64B)
#endif

#define REQUIRE_VECTOR(CTX)                             \
    do {                                                \
        if (unlikely(!(CTX)->altivec_enabled)) {        \
            gen_exception((CTX), POWERPC_EXCP_VPU);     \
            return true;                                \
        }                                               \
    } while (0)

#define REQUIRE_VSX(CTX)                                \
    do {                                                \
        if (unlikely(!(CTX)->vsx_enabled)) {            \
            gen_exception((CTX), POWERPC_EXCP_VSXU);    \
            return true;                                \
        }                                               \
    } while (0)

#define REQUIRE_FPU(ctx)                                \
    do {                                                \
        if (unlikely(!(ctx)->fpu_enabled)) {            \
            gen_exception((ctx), POWERPC_EXCP_FPU);     \
            return true;                                \
        }                                               \
    } while (0)

/*
 * Helpers for implementing sets of trans_* functions.
 * Defer the implementation of NAME to FUNC, with optional extra arguments.
 */
#define TRANS(NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    { return FUNC(ctx, a, __VA_ARGS__); }

#define TRANS64(NAME, FUNC, ...) \
    static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \
    { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); }

/* TODO: More TRANS* helpers for extra insn_flags checks. */


#include "decode-insn32.c.inc"
#include "decode-insn64.c.inc"
#include "power8-pmu-regs.c.inc"

/*
 * Incorporate CIA into the constant when R=1.
 * Validate that when R=1, RA=0.
 */
static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a)
{
    d->rt = a->rt;
    d->ra = a->ra;
    d->si = a->si;
    if (a->r) {
        if (unlikely(a->ra != 0)) {
            gen_invalid(ctx);
            return false;
        }
        d->si += ctx->cia;
    }
    return true;
}

#include "translate/fixedpoint-impl.c.inc"

#include "translate/fp-impl.c.inc"

#include "translate/vmx-impl.c.inc"

#include "translate/vsx-impl.c.inc"

#include "translate/dfp-impl.c.inc"

#include "translate/spe-impl.c.inc"

#include "translate/branch-impl.c.inc"

/* Handles lfdp, lxsd, lxssp */
static void gen_dform39(DisasContext *ctx)
{
    /* The low 2 opcode bits select the sub-instruction */
    switch (ctx->opcode & 0x3) {
    case 0: /* lfdp */
        if (ctx->insns_flags2 & PPC2_ISA205) {
            return gen_lfdp(ctx);
        }
        break;
    case 2: /* lxsd */
        if (ctx->insns_flags2 & PPC2_ISA300) {
            return gen_lxsd(ctx);
        }
        break;
    case 3: /* lxssp */
        if (ctx->insns_flags2 & PPC2_ISA300) {
            return gen_lxssp(ctx);
        }
        break;
    }
    return gen_invalid(ctx);
}

/* handles stfdp, lxv, stxsd, stxssp lxvx */
static void gen_dform3D(DisasContext *ctx)
{
    if ((ctx->opcode & 3) != 1) { /* DS-FORM */
        switch (ctx->opcode & 0x3) {
        case 0: /* stfdp */
            if (ctx->insns_flags2 & PPC2_ISA205) {
                return gen_stfdp(ctx);
            }
            break;
        case 2: /* stxsd */
            if (ctx->insns_flags2 & PPC2_ISA300) {
                return gen_stxsd(ctx);
            }
            break;
        case 3: /* stxssp */
            if (ctx->insns_flags2 & PPC2_ISA300) {
                return gen_stxssp(ctx);
            }
            break;
        }
    }
    return gen_invalid(ctx);
}

#if defined(TARGET_PPC64)
/* brd - byte-reverse doubleword */
static void gen_brd(DisasContext *ctx)
{
    tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
}

/*
 * brw - byte-reverse each word: bswap the whole doubleword, then rotate
 * by 32 to put the two reversed words back in their original positions.
 */
static void gen_brw(DisasContext *ctx)
{
    tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32);

}

/*
 * brh - byte-reverse each halfword: swap the odd and even byte lanes
 * using a mask, shift-right, shift-left, and OR.
 */
static void gen_brh(DisasContext *ctx)
{
    TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull);
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();

    tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8);
    tcg_gen_and_i64(t2, t1, mask);
    tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask);
    tcg_gen_shli_i64(t1, t1, 8);
    tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2);

    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
}
#endif

static opcode_t opcodes[] = {
#if defined(TARGET_PPC64)
GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310),
GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310),
GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310),
#endif
GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300),
#endif
GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205),
GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL),
GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER),
GEN_HANDLER2(addic_, "addic.", 0x0D, 0xFF, 0xFF,
0x00000000, PPC_INTEGER), 7582 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 7583 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 7584 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 7585 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 7586 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7587 #if defined(TARGET_PPC64) 7588 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 7589 #endif 7590 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 7591 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 7592 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7593 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7594 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7595 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 7596 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 7597 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 7598 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7599 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 7600 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 7601 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 7602 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7603 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7604 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7605 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7606 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 7607 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 7608 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 7609 #if defined(TARGET_PPC64) 7610 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 7611 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 
0x00000000, PPC_64B), 7612 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 7613 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 7614 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 7615 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 7616 #endif 7617 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7618 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7619 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7620 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 7621 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 7622 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 7623 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 7624 #if defined(TARGET_PPC64) 7625 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 7626 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 7627 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 7628 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 7629 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 7630 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 7631 PPC_NONE, PPC2_ISA300), 7632 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 7633 PPC_NONE, PPC2_ISA300), 7634 #endif 7635 /* handles lfdp, lxsd, lxssp */ 7636 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7637 /* handles stfdp, stxsd, stxssp */ 7638 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7639 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7640 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7641 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 7642 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 7643 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 7644 
GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 7645 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 7646 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 7647 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7648 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7649 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 7650 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 7651 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 7652 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7653 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7654 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 7655 #if defined(TARGET_PPC64) 7656 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 7657 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 7658 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 7659 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 7660 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 7661 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 7662 #endif 7663 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 7664 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 7665 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 7666 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7667 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7668 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 7669 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 7670 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 7671 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 7672 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 7673 #if 
defined(TARGET_PPC64) 7674 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 7675 #if !defined(CONFIG_USER_ONLY) 7676 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7677 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7678 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7679 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300), 7680 #endif 7681 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7682 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7683 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7684 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7685 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7686 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 7687 #endif 7688 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7689 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW), 7690 GEN_HANDLER(sc, 0x11, 0x01, 0xFF, 0x03FFF01D, PPC_FLOW), 7691 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 7692 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7693 #if defined(TARGET_PPC64) 7694 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 7695 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 7696 #endif 7697 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 7698 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 7699 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 7700 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 7701 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 7702 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 7703 #if defined(TARGET_PPC64) 7704 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 7705 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 7706 
GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 7707 #endif 7708 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 7709 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 7710 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 7711 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7712 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 7713 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 7714 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7715 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 7716 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7717 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 7718 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7719 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 7720 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 7721 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7722 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 7723 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 7724 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 7725 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 7726 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7727 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 7728 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 7729 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 7730 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 7731 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 7732 #if defined(TARGET_PPC64) 7733 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 7734 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 
7735 PPC_SEGMENT_64B), 7736 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 7737 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 7738 PPC_SEGMENT_64B), 7739 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 7740 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 7741 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 7742 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 7743 #endif 7744 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 7745 /* 7746 * XXX Those instructions will need to be handled differently for 7747 * different ISA versions 7748 */ 7749 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 7750 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 7751 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 7752 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 7753 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 7754 #if defined(TARGET_PPC64) 7755 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 7756 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 7757 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 7758 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7759 #endif 7760 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 7761 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 7762 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 7763 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 7764 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 7765 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 7766 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 7767 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 7768 
GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 7769 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 7770 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 7771 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7772 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 7773 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 7774 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 7775 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 7776 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 7777 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 7778 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 7779 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7780 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 7781 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 7782 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 7783 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 7784 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 7785 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 7786 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 7787 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 7788 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 7789 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 7790 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 7791 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 7792 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 7793 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 7794 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 7795 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 7796 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 7797 GEN_HANDLER(esa, 0x1F, 
0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 7798 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 7799 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 7800 GEN_HANDLER2(tlbli_6xx, "tlbli", 0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 7801 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7802 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7803 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7804 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7805 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7806 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7807 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7808 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7809 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7810 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7811 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7812 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7813 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7814 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7815 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7816 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7817 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7818 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7819 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7820 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7821 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7822 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7823 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7824 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7825 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7826 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 
0x00000001, PPC_4xx_COMMON), 7827 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7828 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7829 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7830 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7831 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7832 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7833 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7834 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7835 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7836 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7837 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7838 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7839 PPC_NONE, PPC2_BOOKE206), 7840 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7841 PPC_NONE, PPC2_BOOKE206), 7842 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7843 PPC_NONE, PPC2_BOOKE206), 7844 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7845 PPC_NONE, PPC2_BOOKE206), 7846 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7847 PPC_NONE, PPC2_BOOKE206), 7848 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7849 PPC_NONE, PPC2_PRCNTL), 7850 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7851 PPC_NONE, PPC2_PRCNTL), 7852 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7853 PPC_NONE, PPC2_PRCNTL), 7854 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7855 GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE), 7856 GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC), 7857 GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801, 7858 PPC_BOOKE, PPC2_BOOKE206), 
7859 GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE), 7860 GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001, 7861 PPC_BOOKE, PPC2_BOOKE206), 7862 GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, 7863 PPC_440_SPEC), 7864 GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC), 7865 GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC), 7866 GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC), 7867 GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC), 7868 GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC), 7869 #if defined(TARGET_PPC64) 7870 GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE, 7871 PPC2_ISA300), 7872 GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300), 7873 GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001, 7874 PPC_NONE, PPC2_ISA207S), 7875 GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001, 7876 PPC_NONE, PPC2_ISA207S), 7877 #endif 7878 7879 #undef GEN_INT_ARITH_ADD 7880 #undef GEN_INT_ARITH_ADD_CONST 7881 #define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov) \ 7882 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER), 7883 #define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, \ 7884 add_ca, compute_ca, compute_ov) \ 7885 GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER), 7886 GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) 7887 GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1) 7888 GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0) 7889 GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1) 7890 GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0) 7891 GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1) 7892 GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0) 7893 GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1) 7894 GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300), 7895 GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0) 7896 GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1) 7897 7898 #undef 
GEN_INT_ARITH_DIVW 7899 #define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \ 7900 GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER) 7901 GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0), 7902 GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1), 7903 GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0), 7904 GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1), 7905 GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7906 GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7907 GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7908 GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7909 GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7910 GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7911 7912 #if defined(TARGET_PPC64) 7913 #undef GEN_INT_ARITH_DIVD 7914 #define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \ 7915 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7916 GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0), 7917 GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1), 7918 GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0), 7919 GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1), 7920 7921 GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7922 GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206), 7923 GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7924 GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206), 7925 GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300), 7926 GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300), 7927 7928 #undef GEN_INT_ARITH_MUL_HELPER 7929 #define GEN_INT_ARITH_MUL_HELPER(name, opc3) \ 7930 GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B) 7931 GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00), 7932 GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02), 7933 GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17), 7934 #endif 7935 7936 #undef GEN_INT_ARITH_SUBF 7937 #undef 
GEN_INT_ARITH_SUBF_CONST 7938 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 7939 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER), 7940 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 7941 add_ca, compute_ca, compute_ov) \ 7942 GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER), 7943 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 7944 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 7945 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 7946 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 7947 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 7948 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 7949 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 7950 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 7951 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 7952 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 7953 7954 #undef GEN_LOGICAL1 7955 #undef GEN_LOGICAL2 7956 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 7957 GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type) 7958 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 7959 GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type) 7960 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER), 7961 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER), 7962 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER), 7963 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER), 7964 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER), 7965 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER), 7966 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER), 7967 GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER), 7968 #if defined(TARGET_PPC64) 7969 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B), 7970 #endif 7971 7972 #if defined(TARGET_PPC64) 7973 #undef GEN_PPC64_R2 7974 #undef GEN_PPC64_R4 7975 #define GEN_PPC64_R2(name, opc1, opc2) \ 7976 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7977 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 
0x00000000, \ 7978 PPC_64B) 7979 #define GEN_PPC64_R4(name, opc1, opc2) \ 7980 GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\ 7981 GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000, \ 7982 PPC_64B), \ 7983 GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000, \ 7984 PPC_64B), \ 7985 GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000, \ 7986 PPC_64B) 7987 GEN_PPC64_R4(rldicl, 0x1E, 0x00), 7988 GEN_PPC64_R4(rldicr, 0x1E, 0x02), 7989 GEN_PPC64_R4(rldic, 0x1E, 0x04), 7990 GEN_PPC64_R2(rldcl, 0x1E, 0x08), 7991 GEN_PPC64_R2(rldcr, 0x1E, 0x09), 7992 GEN_PPC64_R4(rldimi, 0x1E, 0x06), 7993 #endif 7994 7995 #undef GEN_LDX_E 7996 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 7997 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2), 7998 7999 #if defined(TARGET_PPC64) 8000 GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE) 8001 8002 /* HV/P7 and later only */ 8003 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 8004 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST) 8005 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 8006 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 8007 #endif 8008 GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER) 8009 GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER) 8010 8011 /* External PID based load */ 8012 #undef GEN_LDEPX 8013 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 8014 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8015 0x00000001, PPC_NONE, PPC2_BOOKE206), 8016 8017 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 8018 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 8019 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 8020 #if defined(TARGET_PPC64) 8021 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00) 8022 #endif 8023 8024 #undef GEN_STX_E 8025 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 8026 GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2), 8027 8028 #if 
defined(TARGET_PPC64) 8029 GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE) 8030 GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST) 8031 GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST) 8032 GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST) 8033 GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST) 8034 #endif 8035 GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER) 8036 GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER) 8037 8038 #undef GEN_STEPX 8039 #define GEN_STEPX(name, ldop, opc2, opc3) \ 8040 GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3, \ 8041 0x00000001, PPC_NONE, PPC2_BOOKE206), 8042 8043 GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06) 8044 GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C) 8045 GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04) 8046 #if defined(TARGET_PPC64) 8047 GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04) 8048 #endif 8049 8050 #undef GEN_CRLOGIC 8051 #define GEN_CRLOGIC(name, tcg_op, opc) \ 8052 GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER) 8053 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08), 8054 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04), 8055 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09), 8056 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07), 8057 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01), 8058 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E), 8059 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D), 8060 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06), 8061 8062 #undef GEN_MAC_HANDLER 8063 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 8064 GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC) 8065 GEN_MAC_HANDLER(macchw, 0x0C, 0x05), 8066 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15), 8067 GEN_MAC_HANDLER(macchws, 0x0C, 0x07), 8068 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17), 8069 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06), 8070 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16), 8071 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04), 8072 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14), 8073 GEN_MAC_HANDLER(machhw, 0x0C, 0x01), 8074 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11), 8075 GEN_MAC_HANDLER(machhws, 
0x0C, 0x03), 8076 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13), 8077 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02), 8078 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12), 8079 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00), 8080 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10), 8081 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D), 8082 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D), 8083 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F), 8084 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F), 8085 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C), 8086 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C), 8087 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E), 8088 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E), 8089 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05), 8090 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15), 8091 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07), 8092 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17), 8093 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01), 8094 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11), 8095 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03), 8096 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13), 8097 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D), 8098 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D), 8099 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F), 8100 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F), 8101 GEN_MAC_HANDLER(mulchw, 0x08, 0x05), 8102 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04), 8103 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01), 8104 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00), 8105 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D), 8106 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C), 8107 8108 GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \ 8109 PPC_NONE, PPC2_TM), 8110 GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \ 8111 PPC_NONE, PPC2_TM), 8112 GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \ 8113 PPC_NONE, PPC2_TM), 8114 GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \ 8115 PPC_NONE, PPC2_TM), 8116 GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \ 8117 PPC_NONE, PPC2_TM), 8118 GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \ 8119 PPC_NONE, PPC2_TM), 8120 GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 
0x0E, 0x1B, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
               PPC_NONE, PPC2_TM),

/* The included .c.inc files contribute further opcode_t entries. */
#include "translate/fp-ops.c.inc"

#include "translate/vmx-ops.c.inc"

#include "translate/vsx-ops.c.inc"

#include "translate/spe-ops.c.inc"
};

/*****************************************************************************/
/* Opcode types */
/*
 * A slot in an opcode dispatch table holds either a direct pointer to
 * an opc_handler_t, or a pointer to a sub-table with PPC_INDIRECT
 * OR-ed into its low bits.  PPC_OPCODE_MASK selects the tag bits.
 */
enum {
    PPC_DIRECT = 0, /* Opcode routine */
    PPC_INDIRECT = 1, /* Indirect opcode table */
};

#define PPC_OPCODE_MASK 0x3

/* Return non-zero when the slot holds a tagged sub-table pointer. */
static inline int is_indirect_opcode(void *handler)
{
    return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
}

/* Strip the tag bits to recover the actual sub-table pointer. */
static inline opc_handler_t **ind_table(void *handler)
{
    return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
}

/* Instruction table creation */
/* Opcodes tables creation */

/* Point every slot of a (sub-)table at the shared invalid_handler. */
static void fill_new_table(opc_handler_t **table, int len)
{
    int i;

    for (i = 0; i < len; i++) {
        table[i] = &invalid_handler;
    }
}

/*
 * Allocate a fresh all-invalid sub-table and store it, tagged with
 * PPC_INDIRECT, at table[idx].  Always returns 0 (g_new aborts on OOM).
 */
static int create_new_table(opc_handler_t **table, unsigned char idx)
{
    opc_handler_t **tmp;

    tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
    fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
    table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);

    return 0;
}

/*
 * Install handler at table[idx].  Returns -1 without modifying the
 * table if the slot is already occupied.
 */
static int insert_in_table(opc_handler_t **table, unsigned char idx,
                           opc_handler_t *handler)
{
    if (table[idx] != &invalid_handler) {
        return -1;
    }
    table[idx] = handler;

    return 0;
}

/*
 * Register a handler in the top-level table at opc1 == idx.
 * Returns -1 (with a diagnostic) if the slot is already taken.
 */
static int register_direct_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx, opc_handler_t *handler)
{
    if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in main "
               "opcode table\n", idx);
        return -1;
    }

    return 0;
}

/*
 * Ensure table[idx1] is an indirect sub-table (creating one on demand)
 * and, when handler != NULL, insert handler at idx2 within it.
 * Passing handler == NULL only guarantees the sub-table exists; this is
 * how the double/triple-indirect helpers "join" intermediate levels.
 * Returns -1 (with a diagnostic) on any conflict.
 */
static int register_ind_in_table(opc_handler_t **table,
                                 unsigned char idx1, unsigned char idx2,
                                 opc_handler_t *handler)
{
    if (table[idx1] == &invalid_handler) {
        if (create_new_table(table, idx1) < 0) {
            printf("*** ERROR: unable to create indirect table "
                   "idx=%02x\n", idx1);
            return -1;
        }
    } else {
        /* Slot in use: it must already be a sub-table, not a direct op. */
        if (!is_indirect_opcode(table[idx1])) {
            printf("*** ERROR: idx %02x already assigned to a direct "
                   "opcode\n", idx1);
            return -1;
        }
    }
    if (handler != NULL &&
        insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in "
               "opcode table %02x\n", idx2, idx1);
        return -1;
    }

    return 0;
}

/* Register a handler one indirect level deep: opc1 -> opc2. */
static int register_ind_insn(opc_handler_t **ppc_opcodes,
                             unsigned char idx1, unsigned char idx2,
                             opc_handler_t *handler)
{
    return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
}

/* Register a handler two indirect levels deep: opc1 -> opc2 -> opc3. */
static int register_dblind_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx1, unsigned char idx2,
                                unsigned char idx3, opc_handler_t *handler)
{
    /* Join (or create) the first-level sub-table without inserting. */
    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
                              handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }

    return 0;
}

/*
 * Register a handler three indirect levels deep:
 * opc1 -> opc2 -> opc3 -> opc4.
 */
static int register_trplind_insn(opc_handler_t **ppc_opcodes,
                                 unsigned
char idx1, unsigned char idx2, 8259 unsigned char idx3, unsigned char idx4, 8260 opc_handler_t *handler) 8261 { 8262 opc_handler_t **table; 8263 8264 if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) { 8265 printf("*** ERROR: unable to join indirect table idx " 8266 "[%02x-%02x]\n", idx1, idx2); 8267 return -1; 8268 } 8269 table = ind_table(ppc_opcodes[idx1]); 8270 if (register_ind_in_table(table, idx2, idx3, NULL) < 0) { 8271 printf("*** ERROR: unable to join 2nd-level indirect table idx " 8272 "[%02x-%02x-%02x]\n", idx1, idx2, idx3); 8273 return -1; 8274 } 8275 table = ind_table(table[idx2]); 8276 if (register_ind_in_table(table, idx3, idx4, handler) < 0) { 8277 printf("*** ERROR: unable to insert opcode " 8278 "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4); 8279 return -1; 8280 } 8281 return 0; 8282 } 8283 static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn) 8284 { 8285 if (insn->opc2 != 0xFF) { 8286 if (insn->opc3 != 0xFF) { 8287 if (insn->opc4 != 0xFF) { 8288 if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2, 8289 insn->opc3, insn->opc4, 8290 &insn->handler) < 0) { 8291 return -1; 8292 } 8293 } else { 8294 if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2, 8295 insn->opc3, &insn->handler) < 0) { 8296 return -1; 8297 } 8298 } 8299 } else { 8300 if (register_ind_insn(ppc_opcodes, insn->opc1, 8301 insn->opc2, &insn->handler) < 0) { 8302 return -1; 8303 } 8304 } 8305 } else { 8306 if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) { 8307 return -1; 8308 } 8309 } 8310 8311 return 0; 8312 } 8313 8314 static int test_opcode_table(opc_handler_t **table, int len) 8315 { 8316 int i, count, tmp; 8317 8318 for (i = 0, count = 0; i < len; i++) { 8319 /* Consistency fixup */ 8320 if (table[i] == NULL) { 8321 table[i] = &invalid_handler; 8322 } 8323 if (table[i] != &invalid_handler) { 8324 if (is_indirect_opcode(table[i])) { 8325 tmp = test_opcode_table(ind_table(table[i]), 8326 
PPC_CPU_INDIRECT_OPCODES_LEN); 8327 if (tmp == 0) { 8328 free(table[i]); 8329 table[i] = &invalid_handler; 8330 } else { 8331 count++; 8332 } 8333 } else { 8334 count++; 8335 } 8336 } 8337 } 8338 8339 return count; 8340 } 8341 8342 static void fix_opcode_tables(opc_handler_t **ppc_opcodes) 8343 { 8344 if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) { 8345 printf("*** WARNING: no opcode defined !\n"); 8346 } 8347 } 8348 8349 /*****************************************************************************/ 8350 void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp) 8351 { 8352 PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu); 8353 opcode_t *opc; 8354 8355 fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN); 8356 for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) { 8357 if (((opc->handler.type & pcc->insns_flags) != 0) || 8358 ((opc->handler.type2 & pcc->insns_flags2) != 0)) { 8359 if (register_insn(cpu->opcodes, opc) < 0) { 8360 error_setg(errp, "ERROR initializing PowerPC instruction " 8361 "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2, 8362 opc->opc3); 8363 return; 8364 } 8365 } 8366 } 8367 fix_opcode_tables(cpu->opcodes); 8368 fflush(stdout); 8369 fflush(stderr); 8370 } 8371 8372 void destroy_ppc_opcodes(PowerPCCPU *cpu) 8373 { 8374 opc_handler_t **table, **table_2; 8375 int i, j, k; 8376 8377 for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) { 8378 if (cpu->opcodes[i] == &invalid_handler) { 8379 continue; 8380 } 8381 if (is_indirect_opcode(cpu->opcodes[i])) { 8382 table = ind_table(cpu->opcodes[i]); 8383 for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) { 8384 if (table[j] == &invalid_handler) { 8385 continue; 8386 } 8387 if (is_indirect_opcode(table[j])) { 8388 table_2 = ind_table(table[j]); 8389 for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) { 8390 if (table_2[k] != &invalid_handler && 8391 is_indirect_opcode(table_2[k])) { 8392 g_free((opc_handler_t *)((uintptr_t)table_2[k] & 8393 ~PPC_INDIRECT)); 8394 } 8395 } 8396 g_free((opc_handler_t 
*)((uintptr_t)table[j] &
                                             ~PPC_INDIRECT));
                }
            }
            g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
                                     ~PPC_INDIRECT));
        }
    }
}

/*
 * Mask off instruction groups the TCG backend cannot emulate.
 * Always returns 0.
 */
int ppc_fixup_cpu(PowerPCCPU *cpu)
{
    CPUPPCState *env = &cpu->env;

    /*
     * TCG doesn't (yet) emulate some groups of instructions that are
     * implemented on some otherwise supported CPUs (e.g. VSX and
     * decimal floating point instructions on POWER7). We remove
     * unsupported instruction groups from the cpu state's instruction
     * masks and hope the guest can cope. For at least the pseries
     * machine, the unavailability of these instructions can be
     * advertised to the guest via the device tree.
     */
    if ((env->insns_flags & ~PPC_TCG_INSNS)
        || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
        warn_report("Disabling some instructions which are not "
                    "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
                    env->insns_flags & ~PPC_TCG_INSNS,
                    env->insns_flags2 & ~PPC_TCG_INSNS2);
    }
    env->insns_flags &= PPC_TCG_INSNS;
    env->insns_flags2 &= PPC_TCG_INSNS2;
    return 0;
}

/*
 * Decode insn via the legacy opcode tables, walking up to three
 * indirect levels (opc1 -> opc2 -> opc3 -> opc4), then invoke the
 * handler.  Returns false (without generating anything) when the
 * opcode is invalid/unsupported or has bits set in the handler's
 * invalid-bits mask.
 */
static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
{
    opc_handler_t **table, *handler;
    uint32_t inval;

    ctx->opcode = insn;

    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
              ctx->le_mode ? "little" : "big");

    /* Walk the table chain until a direct (untagged) handler is found. */
    table = cpu->opcodes;
    handler = table[opc1(insn)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(insn)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(insn)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(insn)];
            }
        }
    }

    /* Is opcode *REALLY* valid ?
*/
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n",
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    /* SPE instructions use a different invalid-bits mask when Rc is set. */
    if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                 && Rc(insn))) {
        inval = handler->inval2;
    } else {
        inval = handler->inval1;
    }

    /* Reject encodings with reserved bits set. */
    if (unlikely((insn & inval) != 0)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n", insn & inval,
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    handler->handler(ctx);
    return true;
}

/*
 * Seed the DisasContext from the CPU state and the TB flags word,
 * which packs one bit/field per HFLAGS_* position.
 */
static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    uint32_t hflags = ctx->base.tb->flags;

    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    /* No memory access emitted yet for this insn. */
    ctx->access_type = -1;
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ?
MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || env->mmu_model & POWERPC_MMU_64;

    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
    ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;

    ctx->singlestep_enabled = 0;
    if ((hflags >> HFLAGS_SE) & 1) {
        /* Single stepping: translate at most one insn per TB. */
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}

/* Nothing to emit at TB start. */
static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

/* Record the guest pc for later state restoration (restore_state_to_opc). */
static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

/*
 * Prefixed instructions (primary opcode 1) exist from ISA v3.1 on;
 * REQUIRE_INSNS_FLAGS2 returns false from here when ISA310 is absent.
 */
static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}

/*
 * Fetch and translate one (possibly prefixed) instruction, advancing
 * ctx->base.pc_next past the words consumed.
 */
static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    /* cia = current instruction address, kept for diagnostics. */
    ctx->cia = pc = ctx->base.pc_next;
    insn =
translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        /* Try the decodetree decoder first, then the legacy tables. */
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        /* Fetch the suffix word and decode the full 64-bit encoding. */
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}

/*
 * Emit the code that ends the TB: update nip as needed for the exit
 * kind, honour single stepping, and chain or exit.
 */
static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    DisasJumpType is_jmp = ctx->base.is_jmp;
    target_ulong nip = ctx->base.pc_next;

    if (is_jmp == DISAS_NORETURN) {
        /* We have already exited the TB. */
        return;
    }

    /* Honor single stepping.
*/ 8607 if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP) 8608 && (nip <= 0x100 || nip > 0xf00)) { 8609 switch (is_jmp) { 8610 case DISAS_TOO_MANY: 8611 case DISAS_EXIT_UPDATE: 8612 case DISAS_CHAIN_UPDATE: 8613 gen_update_nip(ctx, nip); 8614 break; 8615 case DISAS_EXIT: 8616 case DISAS_CHAIN: 8617 break; 8618 default: 8619 g_assert_not_reached(); 8620 } 8621 8622 gen_debug_exception(ctx); 8623 return; 8624 } 8625 8626 switch (is_jmp) { 8627 case DISAS_TOO_MANY: 8628 if (use_goto_tb(ctx, nip)) { 8629 pmu_count_insns(ctx); 8630 tcg_gen_goto_tb(0); 8631 gen_update_nip(ctx, nip); 8632 tcg_gen_exit_tb(ctx->base.tb, 0); 8633 break; 8634 } 8635 /* fall through */ 8636 case DISAS_CHAIN_UPDATE: 8637 gen_update_nip(ctx, nip); 8638 /* fall through */ 8639 case DISAS_CHAIN: 8640 /* 8641 * tcg_gen_lookup_and_goto_ptr will exit the TB if 8642 * CF_NO_GOTO_PTR is set. Count insns now. 8643 */ 8644 if (ctx->base.tb->flags & CF_NO_GOTO_PTR) { 8645 pmu_count_insns(ctx); 8646 } 8647 8648 tcg_gen_lookup_and_goto_ptr(); 8649 break; 8650 8651 case DISAS_EXIT_UPDATE: 8652 gen_update_nip(ctx, nip); 8653 /* fall through */ 8654 case DISAS_EXIT: 8655 pmu_count_insns(ctx); 8656 tcg_gen_exit_tb(NULL, 0); 8657 break; 8658 8659 default: 8660 g_assert_not_reached(); 8661 } 8662 } 8663 8664 static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs) 8665 { 8666 qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first)); 8667 log_target_disas(cs, dcbase->pc_first, dcbase->tb->size); 8668 } 8669 8670 static const TranslatorOps ppc_tr_ops = { 8671 .init_disas_context = ppc_tr_init_disas_context, 8672 .tb_start = ppc_tr_tb_start, 8673 .insn_start = ppc_tr_insn_start, 8674 .translate_insn = ppc_tr_translate_insn, 8675 .tb_stop = ppc_tr_tb_stop, 8676 .disas_log = ppc_tr_disas_log, 8677 }; 8678 8679 void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns) 8680 { 8681 DisasContext ctx; 8682 8683 translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns); 8684 } 8685 
/*
 * Restore guest state when unwinding mid-TB: data[0] is the
 * instruction address recorded by tcg_gen_insn_start() in
 * ppc_tr_insn_start().
 */
void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}