/*
 *  PowerPC emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2003-2007 Jocelyn Mayer
 *  Copyright (C) 2011 Freescale Semiconductor, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "cpu.h"
#include "internal.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "tcg/tcg-op.h"
#include "tcg/tcg-op-gvec.h"
#include "qemu/host-utils.h"
#include "qemu/main-loop.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"

#include "exec/translator.h"
#include "exec/log.h"
#include "qemu/atomic128.h"
#include "spr_tcg.h"

#include "qemu/qemu-print.h"
#include "qapi/error.h"

#define CPU_SINGLE_STEP 0x1
#define CPU_BRANCH_STEP 0x2

/* Include definitions for instructions classes and implementations flags */
/* #define PPC_DEBUG_DISAS */

#ifdef PPC_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif
/*****************************************************************************/
/* Code translation helpers */

/* global register indexes */
static char cpu_reg_names[10 * 3 + 22 * 4 /* GPR */
                          + 10 * 4 + 22 * 5 /* SPE GPRh */
                          + 8 * 5 /* CRF */];
static TCGv cpu_gpr[32];
static TCGv cpu_gprh[32];
static TCGv_i32 cpu_crf[8];
static TCGv cpu_nip;
static TCGv cpu_msr;
static TCGv cpu_ctr;
static TCGv cpu_lr;
#if defined(TARGET_PPC64)
static TCGv cpu_cfar;
#endif
static TCGv cpu_xer, cpu_so, cpu_ov, cpu_ca, cpu_ov32, cpu_ca32;
static TCGv cpu_reserve;
static TCGv cpu_reserve_val;
static TCGv cpu_fpscr;
static TCGv_i32 cpu_access_type;

#include "exec/gen-icount.h"

void ppc_translate_init(void)
{
    int i;
    char *p;
    size_t cpu_reg_names_size;

    p = cpu_reg_names;
    cpu_reg_names_size = sizeof(cpu_reg_names);

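    /*
     * The loops below pack the NUL-terminated register names ("crf0".."crf7",
     * "r0".."r31" and "r0H".."r31H") back to back into cpu_reg_names[]; the
     * size computed in that array's declaration is the exact sum of these
     * string lengths, terminators included.
     */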
    for (i = 0; i < 8; i++) {
        snprintf(p, cpu_reg_names_size, "crf%d", i);
        cpu_crf[i] = tcg_global_mem_new_i32(cpu_env,
                                            offsetof(CPUPPCState, crf[i]), p);
        p += 5;
        cpu_reg_names_size -= 5;
    }

    for (i = 0; i < 32; i++) {
        snprintf(p, cpu_reg_names_size, "r%d", i);
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUPPCState, gpr[i]), p);
        p += (i < 10) ? 3 : 4;
        cpu_reg_names_size -= (i < 10) ? 3 : 4;
        snprintf(p, cpu_reg_names_size, "r%dH", i);
        cpu_gprh[i] = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, gprh[i]), p);
        p += (i < 10) ? 4 : 5;
        cpu_reg_names_size -= (i < 10) ? 4 : 5;
    }

    cpu_nip = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, nip), "nip");

    cpu_msr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, msr), "msr");

    cpu_ctr = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, ctr), "ctr");

    cpu_lr = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, lr), "lr");

#if defined(TARGET_PPC64)
    cpu_cfar = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, cfar), "cfar");
#endif

    cpu_xer = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUPPCState, xer), "xer");
    cpu_so = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, so), "SO");
    cpu_ov = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ov), "OV");
    cpu_ca = tcg_global_mem_new(cpu_env,
                                offsetof(CPUPPCState, ca), "CA");
    cpu_ov32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ov32), "OV32");
    cpu_ca32 = tcg_global_mem_new(cpu_env,
                                  offsetof(CPUPPCState, ca32), "CA32");

    cpu_reserve = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUPPCState, reserve_addr),
                                     "reserve_addr");
    cpu_reserve_val = tcg_global_mem_new(cpu_env,
                                         offsetof(CPUPPCState, reserve_val),
                                         "reserve_val");

    cpu_fpscr = tcg_global_mem_new(cpu_env,
                                   offsetof(CPUPPCState, fpscr), "fpscr");

    cpu_access_type = tcg_global_mem_new_i32(cpu_env,
                                             offsetof(CPUPPCState, access_type),
                                             "access_type");
}

/* internal defines */
struct DisasContext {
    DisasContextBase base;
    target_ulong cia;  /* current instruction address */
    uint32_t opcode;
    /* Routine used to access memory */
    bool pr, hv, dr, le_mode;
    bool lazy_tlb_flush;
    bool need_access_type;
    int mem_idx;
    int access_type;
    /* Translation flags */
    MemOp default_tcg_memop_mask;
#if defined(TARGET_PPC64)
    bool sf_mode;
    bool has_cfar;
#endif
    bool fpu_enabled;
    bool altivec_enabled;
    bool vsx_enabled;
    bool spe_enabled;
    bool tm_enabled;
    bool gtse;
    bool hr;
    bool mmcr0_pmcc0;
    bool mmcr0_pmcc1;
    bool pmu_insn_cnt;
    ppc_spr_t *spr_cb; /* Needed to check rights for mfspr/mtspr */
    int singlestep_enabled;
    uint32_t flags;
    uint64_t insns_flags;
    uint64_t insns_flags2;
};

#define DISAS_EXIT         DISAS_TARGET_0  /* exit to main loop, pc updated */
#define DISAS_EXIT_UPDATE  DISAS_TARGET_1  /* exit to main loop, pc stale */
#define DISAS_CHAIN        DISAS_TARGET_2  /* lookup next tb, pc updated */
#define DISAS_CHAIN_UPDATE DISAS_TARGET_3  /* lookup next tb, pc stale */

/* Return true iff byteswap is needed in a scalar memop */
static inline bool need_byteswap(const DisasContext *ctx)
{
#if defined(TARGET_WORDS_BIGENDIAN)
    return ctx->le_mode;
#else
    return !ctx->le_mode;
#endif
}

/* True when active word size < size of target_long. */
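/*
 * For example, a 64-bit CPU running with MSR[SF] = 0 has sf_mode clear, so
 * NARROW_MODE() is true and callers truncate addresses and use the 32-bit
 * forms of comparisons and carry computations.
 */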
#ifdef TARGET_PPC64
# define NARROW_MODE(C)  (!(C)->sf_mode)
#else
# define NARROW_MODE(C)  0
#endif

struct opc_handler_t {
    /* invalid bits for instruction 1 (Rc(opcode) == 0) */
    uint32_t inval1;
    /* invalid bits for instruction 2 (Rc(opcode) == 1) */
    uint32_t inval2;
    /* instruction type */
    uint64_t type;
    /* extended instruction type */
    uint64_t type2;
    /* handler */
    void (*handler)(DisasContext *ctx);
};

/* SPR load/store helpers */
static inline void gen_load_spr(TCGv t, int reg)
{
    tcg_gen_ld_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_store_spr(int reg, TCGv t)
{
    tcg_gen_st_tl(t, cpu_env, offsetof(CPUPPCState, spr[reg]));
}

static inline void gen_set_access_type(DisasContext *ctx, int access_type)
{
    if (ctx->need_access_type && ctx->access_type != access_type) {
        tcg_gen_movi_i32(cpu_access_type, access_type);
        ctx->access_type = access_type;
    }
}

static inline void gen_update_nip(DisasContext *ctx, target_ulong nip)
{
    if (NARROW_MODE(ctx)) {
        nip = (uint32_t)nip;
    }
    tcg_gen_movi_tl(cpu_nip, nip);
}

static void gen_exception_err(DisasContext *ctx, uint32_t excp, uint32_t error)
{
    TCGv_i32 t0, t1;

    /*
     * These are all synchronous exceptions, we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    t1 = tcg_const_i32(error);
    gen_helper_raise_exception_err(cpu_env, t0, t1);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception(DisasContext *ctx, uint32_t excp)
{
    TCGv_i32 t0;

    /*
     * These are all synchronous exceptions, we set the PC back to the
     * faulting instruction
     */
    gen_update_nip(ctx, ctx->cia);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_exception_nip(DisasContext *ctx, uint32_t excp,
                              target_ulong nip)
{
    TCGv_i32 t0;

    gen_update_nip(ctx, nip);
    t0 = tcg_const_i32(excp);
    gen_helper_raise_exception(cpu_env, t0);
    tcg_temp_free_i32(t0);
    ctx->base.is_jmp = DISAS_NORETURN;
}

static void gen_icount_io_start(DisasContext *ctx)
{
    if (tb_cflags(ctx->base.tb) & CF_USE_ICOUNT) {
        gen_io_start();
        /*
         * An I/O instruction must be last in the TB.
         * Chain to the next TB, and let the code from gen_tb_start
         * decide if we need to return to the main loop.
         * Doing this first also allows this value to be overridden.
         */
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }
}

/*
 * Tells the caller what is the appropriate exception to generate and prepares
 * SPR registers for this exception.
 *
 * The exception can be either POWERPC_EXCP_TRACE (on most PowerPCs) or
 * POWERPC_EXCP_DEBUG (on BookE).
 */
static uint32_t gen_prep_dbgex(DisasContext *ctx)
{
    if (ctx->flags & POWERPC_FLAG_DE) {
        target_ulong dbsr = 0;
        if (ctx->singlestep_enabled & CPU_SINGLE_STEP) {
            dbsr = DBCR0_ICMP;
        } else {
            /* Must have been branch */
            dbsr = DBCR0_BRT;
        }
        TCGv t0 = tcg_temp_new();
        gen_load_spr(t0, SPR_BOOKE_DBSR);
        tcg_gen_ori_tl(t0, t0, dbsr);
        gen_store_spr(SPR_BOOKE_DBSR, t0);
        tcg_temp_free(t0);
        return POWERPC_EXCP_DEBUG;
    } else {
        return POWERPC_EXCP_TRACE;
    }
}

static void gen_debug_exception(DisasContext *ctx)
{
    gen_helper_raise_exception(cpu_env, tcg_constant_i32(gen_prep_dbgex(ctx)));
    ctx->base.is_jmp = DISAS_NORETURN;
}

static inline void gen_inval_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_INVAL | error);
}

static inline void gen_priv_exception(DisasContext *ctx, uint32_t error)
{
    gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_PRIV | error);
}

static inline void gen_hvpriv_exception(DisasContext *ctx, uint32_t error)
{
    /* Will be converted to program check if needed */
    gen_exception_err(ctx, POWERPC_EXCP_HV_EMU, POWERPC_EXCP_PRIV | error);
}

/*****************************************************************************/
/* SPR READ/WRITE CALLBACKS */

void spr_noaccess(DisasContext *ctx, int gprn, int sprn)
{
#if 0
    sprn = ((sprn >> 5) & 0x1F) | ((sprn & 0x1F) << 5);
    printf("ERROR: try to access SPR %d !\n", sprn);
#endif
}

/* #define PPC_DUMP_SPR_ACCESSES */

/*
 * Generic callbacks:
 * do nothing but store/retrieve spr value
 */
static void spr_load_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_load_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_read_generic(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}

static void spr_store_dump_spr(int sprn)
{
#ifdef PPC_DUMP_SPR_ACCESSES
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_store_dump_spr(cpu_env, t0);
    tcg_temp_free_i32(t0);
#endif
}

void spr_write_generic(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn, cpu_gpr[gprn]);
    spr_store_dump_spr(sprn);
}

void spr_write_CTRL(DisasContext *ctx, int sprn, int gprn)
{
    spr_write_generic(ctx, sprn, gprn);

    /*
     * SPR_CTRL writes must force a new translation block,
     * allowing the PMU to calculate the run latch events with
     * more accuracy.
     */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_generic32(DisasContext *ctx, int sprn, int gprn)
{
#ifdef TARGET_PPC64
    TCGv t0 = tcg_temp_new();
    tcg_gen_ext32u_tl(t0, cpu_gpr[gprn]);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    spr_store_dump_spr(sprn);
#else
    spr_write_generic(ctx, sprn, gprn);
#endif
}

void spr_write_clear(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    gen_load_spr(t0, sprn);
    tcg_gen_neg_tl(t1, cpu_gpr[gprn]);
    tcg_gen_and_tl(t0, t0, t1);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}

void spr_access_nop(DisasContext *ctx, int sprn, int gprn)
{
}

#endif

/* SPR common to all PowerPC */
/* XER */
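/*
 * XER is kept split: cpu_xer holds the bits that are not tracked
 * individually, while SO/OV/CA (and, on ISA v3.00, OV32/CA32) live in
 * their own globals.  spr_read_xer() reassembles the architected value
 * and spr_write_xer() scatters it back out.
 */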
void spr_read_xer(DisasContext *ctx, int gprn, int sprn)
{
    TCGv dst = cpu_gpr[gprn];
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();
    tcg_gen_mov_tl(dst, cpu_xer);
    tcg_gen_shli_tl(t0, cpu_so, XER_SO);
    tcg_gen_shli_tl(t1, cpu_ov, XER_OV);
    tcg_gen_shli_tl(t2, cpu_ca, XER_CA);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_or_tl(dst, dst, t2);
    tcg_gen_or_tl(dst, dst, t0);
    if (is_isa300(ctx)) {
        tcg_gen_shli_tl(t0, cpu_ov32, XER_OV32);
        tcg_gen_or_tl(dst, dst, t0);
        tcg_gen_shli_tl(t0, cpu_ca32, XER_CA32);
        tcg_gen_or_tl(dst, dst, t0);
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_xer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv src = cpu_gpr[gprn];
    /* Write all flags, while reading back check for isa300 */
    tcg_gen_andi_tl(cpu_xer, src,
                    ~((1u << XER_SO) |
                      (1u << XER_OV) | (1u << XER_OV32) |
                      (1u << XER_CA) | (1u << XER_CA32)));
    tcg_gen_extract_tl(cpu_ov32, src, XER_OV32, 1);
    tcg_gen_extract_tl(cpu_ca32, src, XER_CA32, 1);
    tcg_gen_extract_tl(cpu_so, src, XER_SO, 1);
    tcg_gen_extract_tl(cpu_ov, src, XER_OV, 1);
    tcg_gen_extract_tl(cpu_ca, src, XER_CA, 1);
}

/* LR */
void spr_read_lr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_lr);
}

void spr_write_lr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_lr, cpu_gpr[gprn]);
}

/* CFAR */
#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_read_cfar(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_cfar);
}

void spr_write_cfar(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_cfar, cpu_gpr[gprn]);
}
#endif /* defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) */

/* CTR */
void spr_read_ctr(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_mov_tl(cpu_gpr[gprn], cpu_ctr);
}

void spr_write_ctr(DisasContext *ctx, int sprn, int gprn)
{
    tcg_gen_mov_tl(cpu_ctr, cpu_gpr[gprn]);
}

/* User read access to SPR */
/* USPRx */
/* UMMCRx */
/* UPMCx */
/* USIA */
/* UDECR */
void spr_read_ureg(DisasContext *ctx, int gprn, int sprn)
{
    gen_load_spr(cpu_gpr[gprn], sprn + 0x10);
}

#if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY)
void spr_write_ureg(DisasContext *ctx, int sprn, int gprn)
{
    gen_store_spr(sprn + 0x10, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC */
/* DECR */
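/*
 * The decrementer and time base are derived from the QEMU clock, so the
 * accessors below go through helpers and start I/O-style icount handling
 * (gen_icount_io_start()) instead of reading an env field directly.
 */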
#if !defined(CONFIG_USER_ONLY)
void spr_read_decr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_decr(cpu_gpr[gprn], cpu_env);
}

void spr_write_decr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_decr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* SPR common to all non-embedded PowerPC, except 601 */
/* Time base */
void spr_read_tbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_tbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_tbu(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbl(cpu_gpr[gprn], cpu_env);
}

void spr_read_atbu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_atbu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_tbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_atbu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_atbu(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
void spr_read_purr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_purr(cpu_gpr[gprn], cpu_env);
}

void spr_write_purr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_purr(cpu_env, cpu_gpr[gprn]);
}

/* HDECR */
void spr_read_hdecr(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_hdecr(cpu_gpr[gprn], cpu_env);
}

void spr_write_hdecr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_hdecr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_vtb(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_vtb(cpu_gpr[gprn], cpu_env);
}

void spr_write_vtb(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_vtb(cpu_env, cpu_gpr[gprn]);
}

void spr_write_tbu40(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_tbu40(cpu_env, cpu_gpr[gprn]);
}

#endif
#endif

#if !defined(CONFIG_USER_ONLY)
/* IBAT0U...IBAT7U */
/* IBAT0L...IBAT7L */
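/*
 * BAT registers are exposed as upper/lower SPR pairs.  The accessors
 * below recover the array index from the SPR number: bit 0 selects the
 * upper or lower half and (sprn - base) / 2 selects the BAT entry, with
 * the "_h" variants adding 4 to reach BAT4-7.
 */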
void spr_read_ibat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_read_ibat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][((sprn - SPR_IBAT4U) / 2) + 4]));
}

void spr_write_ibatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4U) / 2) + 4);
    gen_helper_store_ibatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0L) / 2);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_ibatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_IBAT4L) / 2) + 4);
    gen_helper_store_ibatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* DBAT0U...DBAT7U */
/* DBAT0L...DBAT7L */
void spr_read_dbat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][(sprn - SPR_DBAT0U) / 2]));
}

void spr_read_dbat_h(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           DBAT[sprn & 1][((sprn - SPR_DBAT4U) / 2) + 4]));
}

void spr_write_dbatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0U) / 2);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatu_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4U) / 2) + 4);
    gen_helper_store_dbatu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_DBAT0L) / 2);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_dbatl_h(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(((sprn - SPR_DBAT4L) / 2) + 4);
    gen_helper_store_dbatl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

/* SDR1 */
void spr_write_sdr1(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_sdr1(cpu_env, cpu_gpr[gprn]);
}

#if defined(TARGET_PPC64)
/* 64 bits PowerPC specific SPRs */
/* PIDR */
void spr_write_pidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pidr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_lpidr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpidr(cpu_env, cpu_gpr[gprn]);
}

void spr_read_hior(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env, offsetof(CPUPPCState, excp_prefix));
}

void spr_write_hior(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0x3FFFFF00000ULL);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    tcg_temp_free(t0);
}
void spr_write_ptcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_ptcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_pcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_pcr(cpu_env, cpu_gpr[gprn]);
}

/* DPDES */
void spr_read_dpdes(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_dpdes(cpu_gpr[gprn], cpu_env);
}

void spr_write_dpdes(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_dpdes(cpu_env, cpu_gpr[gprn]);
}
#endif
#endif

/* PowerPC 601 specific registers */
/* RTC */
void spr_read_601_rtcl(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcl(cpu_gpr[gprn], cpu_env);
}

void spr_read_601_rtcu(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_load_601_rtcu(cpu_gpr[gprn], cpu_env);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_601_rtcu(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcu(cpu_env, cpu_gpr[gprn]);
}

void spr_write_601_rtcl(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_601_rtcl(cpu_env, cpu_gpr[gprn]);
}

void spr_write_hid0_601(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_hid0_601(cpu_env, cpu_gpr[gprn]);
    /* Must stop the translation as endianness may have changed */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}
#endif

/* Unified bats */
#if !defined(CONFIG_USER_ONLY)
void spr_read_601_ubat(DisasContext *ctx, int gprn, int sprn)
{
    tcg_gen_ld_tl(cpu_gpr[gprn], cpu_env,
                  offsetof(CPUPPCState,
                           IBAT[sprn & 1][(sprn - SPR_IBAT0U) / 2]));
}

void spr_write_601_ubatu(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batl(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}

void spr_write_601_ubatl(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32((sprn - SPR_IBAT0U) / 2);
    gen_helper_store_601_batu(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
#endif

/* PowerPC 40x specific registers */
#if !defined(CONFIG_USER_ONLY)
void spr_read_40x_pit(DisasContext *ctx, int gprn, int sprn)
{
    gen_icount_io_start(ctx);
    gen_helper_load_40x_pit(cpu_gpr[gprn], cpu_env);
}

void spr_write_40x_pit(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_pit(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_dbcr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_store_spr(sprn, cpu_gpr[gprn]);
    gen_helper_store_40x_dbcr0(cpu_env, cpu_gpr[gprn]);
    /* We must stop translation as we may have rebooted */
    ctx->base.is_jmp = DISAS_EXIT_UPDATE;
}

void spr_write_40x_sler(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_sler(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_40x_tsr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_40x_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xFF);
    gen_store_spr(SPR_40x_PID, t0);
    tcg_temp_free(t0);
}

void spr_write_booke_tcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tcr(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_tsr(DisasContext *ctx, int sprn, int gprn)
{
    gen_icount_io_start(ctx);
    gen_helper_store_booke_tsr(cpu_env, cpu_gpr[gprn]);
}
#endif

/* PIR */
#if !defined(CONFIG_USER_ONLY)
void spr_write_pir(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_andi_tl(t0, cpu_gpr[gprn], 0xF);
    gen_store_spr(SPR_PIR, t0);
    tcg_temp_free(t0);
}
#endif

/* SPE specific registers */
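/*
 * SPEFSCR is stored as the 32-bit spe_fscr field of CPUPPCState, so the
 * accessors below go through an i32 temporary and zero-extend on read.
 */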
void spr_read_spefscr(DisasContext *ctx, int gprn, int sprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_ld_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_gen_extu_i32_tl(cpu_gpr[gprn], t0);
    tcg_temp_free_i32(t0);
}

void spr_write_spefscr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t0, cpu_gpr[gprn]);
    tcg_gen_st_i32(t0, cpu_env, offsetof(CPUPPCState, spe_fscr));
    tcg_temp_free_i32(t0);
}

#if !defined(CONFIG_USER_ONLY)
/* Callback used to write the exception vector base */
void spr_write_excp_prefix(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivpr_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_prefix));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_excp_vector(DisasContext *ctx, int sprn, int gprn)
{
    int sprn_offs;

    if (sprn >= SPR_BOOKE_IVOR0 && sprn <= SPR_BOOKE_IVOR15) {
        sprn_offs = sprn - SPR_BOOKE_IVOR0;
    } else if (sprn >= SPR_BOOKE_IVOR32 && sprn <= SPR_BOOKE_IVOR37) {
        sprn_offs = sprn - SPR_BOOKE_IVOR32 + 32;
    } else if (sprn >= SPR_BOOKE_IVOR38 && sprn <= SPR_BOOKE_IVOR42) {
        sprn_offs = sprn - SPR_BOOKE_IVOR38 + 38;
    } else {
        printf("Trying to write an unknown exception vector %d %03x\n",
               sprn, sprn);
        gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG);
        return;
    }

    TCGv t0 = tcg_temp_new();
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUPPCState, ivor_mask));
    tcg_gen_and_tl(t0, t0, cpu_gpr[gprn]);
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUPPCState, excp_vectors[sprn_offs]));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}
#endif

#ifdef TARGET_PPC64
#ifndef CONFIG_USER_ONLY
void spr_write_amr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 PR=0 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    if (ctx->pr) {
        gen_load_spr(t1, SPR_UAMOR);
    } else {
        gen_load_spr(t1, SPR_AMOR);
    }

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load AMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_AMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_AMR, t0);
    spr_store_dump_spr(SPR_AMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

void spr_write_uamor(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load UAMOR and clear new bits in t0 */
    gen_load_spr(t0, SPR_UAMOR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_UAMOR, t0);
    spr_store_dump_spr(SPR_UAMOR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}

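/*
 * spr_write_amr() and spr_write_uamor() above and spr_write_iamr() below
 * all use the same insert-under-mask pattern:
 *   new = (old & ~mask) | (gpr & mask)
 * where the mask is taken from the controlling authority register (AMOR,
 * or UAMOR when in problem state), so bits outside the mask are preserved.
 */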
void spr_write_iamr(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv t2 = tcg_temp_new();

    /*
     * Note, the HV=1 case is handled earlier by simply using
     * spr_write_generic for HV mode in the SPR table
     */

    /* Build insertion mask into t1 based on context */
    gen_load_spr(t1, SPR_AMOR);

    /* Mask new bits into t2 */
    tcg_gen_and_tl(t2, t1, cpu_gpr[gprn]);

    /* Load IAMR and clear new bits in t0 */
    gen_load_spr(t0, SPR_IAMR);
    tcg_gen_andc_tl(t0, t0, t1);

    /* OR in the new bits and write it out */
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_IAMR, t0);
    spr_store_dump_spr(SPR_IAMR);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
}
#endif
#endif

#ifndef CONFIG_USER_ONLY
void spr_read_thrm(DisasContext *ctx, int gprn, int sprn)
{
    gen_helper_fixup_thrm(cpu_env);
    gen_load_spr(cpu_gpr[gprn], sprn);
    spr_load_dump_spr(sprn);
}
#endif /* !CONFIG_USER_ONLY */

#if !defined(CONFIG_USER_ONLY)
void spr_write_e500_l1csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR0_DCE | L1CSR0_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l1csr1(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn], L1CSR1_ICE | L1CSR1_CPE);
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_e500_l2csr0(DisasContext *ctx, int sprn, int gprn)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_andi_tl(t0, cpu_gpr[gprn],
                    ~(E500_L2CSR0_L2FI | E500_L2CSR0_L2FL | E500_L2CSR0_L2LFC));
    gen_store_spr(sprn, t0);
    tcg_temp_free(t0);
}

void spr_write_booke206_mmucsr0(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke206_tlbflush(cpu_env, cpu_gpr[gprn]);
}

void spr_write_booke_pid(DisasContext *ctx, int sprn, int gprn)
{
    TCGv_i32 t0 = tcg_const_i32(sprn);
    gen_helper_booke_setpid(cpu_env, t0, cpu_gpr[gprn]);
    tcg_temp_free_i32(t0);
}
void spr_write_eplc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_eplc(cpu_env, cpu_gpr[gprn]);
}
void spr_write_epsc(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_booke_set_epsc(cpu_env, cpu_gpr[gprn]);
}

#endif

#if !defined(CONFIG_USER_ONLY)
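/*
 * MAS7_MAS3 presents MAS7 in its upper 32 bits and MAS3 in its lower
 * 32 bits; the two callbacks below split a write across the underlying
 * SPRs and join them again on read.
 */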
void spr_write_mas73(DisasContext *ctx, int sprn, int gprn)
{
    TCGv val = tcg_temp_new();
    tcg_gen_ext32u_tl(val, cpu_gpr[gprn]);
    gen_store_spr(SPR_BOOKE_MAS3, val);
    tcg_gen_shri_tl(val, cpu_gpr[gprn], 32);
    gen_store_spr(SPR_BOOKE_MAS7, val);
    tcg_temp_free(val);
}

void spr_read_mas73(DisasContext *ctx, int gprn, int sprn)
{
    TCGv mas7 = tcg_temp_new();
    TCGv mas3 = tcg_temp_new();
    gen_load_spr(mas7, SPR_BOOKE_MAS7);
    tcg_gen_shli_tl(mas7, mas7, 32);
    gen_load_spr(mas3, SPR_BOOKE_MAS3);
    tcg_gen_or_tl(cpu_gpr[gprn], mas3, mas7);
    tcg_temp_free(mas3);
    tcg_temp_free(mas7);
}

#endif

#ifdef TARGET_PPC64
static void gen_fscr_facility_check(DisasContext *ctx, int facility_sprn,
                                    int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_fscr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

static void gen_msr_facility_check(DisasContext *ctx, int facility_sprn,
                                   int bit, int sprn, int cause)
{
    TCGv_i32 t1 = tcg_const_i32(bit);
    TCGv_i32 t2 = tcg_const_i32(sprn);
    TCGv_i32 t3 = tcg_const_i32(cause);

    gen_helper_msr_facility_check(cpu_env, t1, t2, t3);

    tcg_temp_free_i32(t3);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t1);
}

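/*
 * Some SPRs are architecturally the upper 32 bits of another SPR (the
 * TM and EBB "upper32" registers handled further down).  The two helpers
 * below implement that by reading or depositing bits 32..63 of SPR
 * sprn - 1.
 */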
void spr_read_prev_upper32(DisasContext *ctx, int gprn, int sprn)
{
    TCGv spr_up = tcg_temp_new();
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_shri_tl(spr_up, spr, 32);
    tcg_gen_ext32u_tl(cpu_gpr[gprn], spr_up);

    tcg_temp_free(spr);
    tcg_temp_free(spr_up);
}

void spr_write_prev_upper32(DisasContext *ctx, int sprn, int gprn)
{
    TCGv spr = tcg_temp_new();

    gen_load_spr(spr, sprn - 1);
    tcg_gen_deposit_tl(spr, spr, cpu_gpr[gprn], 32, 32);
    gen_store_spr(sprn - 1, spr);

    tcg_temp_free(spr);
}

#if !defined(CONFIG_USER_ONLY)
void spr_write_hmer(DisasContext *ctx, int sprn, int gprn)
{
    TCGv hmer = tcg_temp_new();

    gen_load_spr(hmer, sprn);
    tcg_gen_and_tl(hmer, cpu_gpr[gprn], hmer);
    gen_store_spr(sprn, hmer);
    spr_store_dump_spr(sprn);
    tcg_temp_free(hmer);
}

void spr_write_lpcr(DisasContext *ctx, int sprn, int gprn)
{
    gen_helper_store_lpcr(cpu_env, cpu_gpr[gprn]);
}
#endif /* !defined(CONFIG_USER_ONLY) */

void spr_read_tar(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tar(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_TAR, sprn, FSCR_IC_TAR);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_tm(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_tm_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_tm_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_msr_facility_check(ctx, SPR_FSCR, MSR_TM, sprn, FSCR_IC_TM);
    spr_write_prev_upper32(ctx, sprn, gprn);
}

void spr_read_ebb(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_generic(ctx, gprn, sprn);
}

void spr_write_ebb(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_generic(ctx, sprn, gprn);
}

void spr_read_ebb_upper32(DisasContext *ctx, int gprn, int sprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_read_prev_upper32(ctx, gprn, sprn);
}

void spr_write_ebb_upper32(DisasContext *ctx, int sprn, int gprn)
{
    gen_fscr_facility_check(ctx, SPR_FSCR, FSCR_EBB, sprn, FSCR_IC_EBB);
    spr_write_prev_upper32(ctx, sprn, gprn);
}
#endif

#define GEN_HANDLER(name, opc1, opc2, opc3, inval, type) \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER_E(name, opc1, opc2, opc3, inval, type, type2) \
GEN_OPCODE(name, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER2(name, onam, opc1, opc2, opc3, inval, type) \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, PPC_NONE)

#define GEN_HANDLER2_E(name, onam, opc1, opc2, opc3, inval, type, type2) \
GEN_OPCODE2(name, onam, opc1, opc2, opc3, inval, type, type2)

#define GEN_HANDLER_E_2(name, opc1, opc2, opc3, opc4, inval, type, type2) \
GEN_OPCODE3(name, opc1, opc2, opc3, opc4, inval, type, type2)

#define GEN_HANDLER2_E_2(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2) \
GEN_OPCODE4(name, onam, opc1, opc2, opc3, opc4, inval, typ, typ2)

typedef struct opcode_t {
    unsigned char opc1, opc2, opc3, opc4;
#if HOST_LONG_BITS == 64 /* Explicitly align to 64 bits */
    unsigned char pad[4];
#endif
    opc_handler_t handler;
    const char *oname;
} opcode_t;

/* Helpers for priv. check */
#define GEN_PRIV \
    do { \
        gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); return; \
    } while (0)

#if defined(CONFIG_USER_ONLY)
#define CHK_HV GEN_PRIV
#define CHK_SV GEN_PRIV
#define CHK_HVRM GEN_PRIV
#else
#define CHK_HV \
    do { \
        if (unlikely(ctx->pr || !ctx->hv)) { \
            GEN_PRIV; \
        } \
    } while (0)
#define CHK_SV \
    do { \
        if (unlikely(ctx->pr)) { \
            GEN_PRIV; \
        } \
    } while (0)
#define CHK_HVRM \
    do { \
        if (unlikely(ctx->pr || !ctx->hv || ctx->dr)) { \
            GEN_PRIV; \
        } \
    } while (0)
#endif

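/*
 * In system-emulation builds the checks above expand to: CHK_SV traps when
 * in problem state, CHK_HV additionally requires hypervisor state, and
 * CHK_HVRM further requires data translation to be off (hypervisor real
 * mode).  In user-only builds they all trap unconditionally.  CHK_NONE
 * below simply marks handlers that need no privilege check.
 */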
#define CHK_NONE

/*****************************************************************************/
/* PowerPC instructions table */

#define GEN_OPCODE(name, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE_DUAL(name, op1, op2, op3, invl1, invl2, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl1, \
        .inval2 = invl2, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE2(name, onam, op1, op2, op3, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = 0xff, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = onam, \
}
#define GEN_OPCODE3(name, op1, op2, op3, op4, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = op4, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = stringify(name), \
}
#define GEN_OPCODE4(name, onam, op1, op2, op3, op4, invl, _typ, _typ2) \
{ \
    .opc1 = op1, \
    .opc2 = op2, \
    .opc3 = op3, \
    .opc4 = op4, \
    .handler = { \
        .inval1 = invl, \
        .type = _typ, \
        .type2 = _typ2, \
        .handler = &gen_##name, \
    }, \
    .oname = onam, \
}

/* Invalid instruction */
static void gen_invalid(DisasContext *ctx)
{
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

static opc_handler_t invalid_handler = {
    .inval1 = 0xFFFFFFFF,
    .inval2 = 0xFFFFFFFF,
    .type = PPC_NONE,
    .type2 = PPC_NONE,
    .handler = gen_invalid,
};

/*** Integer comparison ***/

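/*
 * Compute a CR field for arg0 <cmp> arg1: start from EQ, overwrite it
 * with LT or GT via movcond as appropriate, then OR in the current SO
 * bit from cpu_so.
 */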
static inline void gen_op_cmp(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    TCGv_i32 t = tcg_temp_new_i32();

    tcg_gen_movi_tl(t0, CRF_EQ);
    tcg_gen_movi_tl(t1, CRF_LT);
    tcg_gen_movcond_tl((s ? TCG_COND_LT : TCG_COND_LTU),
                       t0, arg0, arg1, t1, t0);
    tcg_gen_movi_tl(t1, CRF_GT);
    tcg_gen_movcond_tl((s ? TCG_COND_GT : TCG_COND_GTU),
                       t0, arg0, arg1, t1, t0);

    tcg_gen_trunc_tl_i32(t, t0);
    tcg_gen_trunc_tl_i32(cpu_crf[crf], cpu_so);
    tcg_gen_or_i32(cpu_crf[crf], cpu_crf[crf], t);

    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t);
}

static inline void gen_op_cmpi(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_op_cmp32(TCGv arg0, TCGv arg1, int s, int crf)
{
    TCGv t0, t1;
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    if (s) {
        tcg_gen_ext32s_tl(t0, arg0);
        tcg_gen_ext32s_tl(t1, arg1);
    } else {
        tcg_gen_ext32u_tl(t0, arg0);
        tcg_gen_ext32u_tl(t1, arg1);
    }
    gen_op_cmp(t0, t1, s, crf);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}

static inline void gen_op_cmpi32(TCGv arg0, target_ulong arg1, int s, int crf)
{
    TCGv t0 = tcg_const_tl(arg1);
    gen_op_cmp32(arg0, t0, s, crf);
    tcg_temp_free(t0);
}

static inline void gen_set_Rc0(DisasContext *ctx, TCGv reg)
{
    if (NARROW_MODE(ctx)) {
        gen_op_cmpi32(reg, 0, 1, 0);
    } else {
        gen_op_cmpi(reg, 0, 1, 0);
    }
}

/* cmprb - range comparison: isupper, isalpha, islower */
static void gen_cmprb(DisasContext *ctx)
{
    TCGv_i32 src1 = tcg_temp_new_i32();
    TCGv_i32 src2 = tcg_temp_new_i32();
    TCGv_i32 src2lo = tcg_temp_new_i32();
    TCGv_i32 src2hi = tcg_temp_new_i32();
    TCGv_i32 crf = cpu_crf[crfD(ctx->opcode)];

    tcg_gen_trunc_tl_i32(src1, cpu_gpr[rA(ctx->opcode)]);
    tcg_gen_trunc_tl_i32(src2, cpu_gpr[rB(ctx->opcode)]);

    tcg_gen_andi_i32(src1, src1, 0xFF);
    tcg_gen_ext8u_i32(src2lo, src2);
    tcg_gen_shri_i32(src2, src2, 8);
    tcg_gen_ext8u_i32(src2hi, src2);

    tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
    tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
    tcg_gen_and_i32(crf, src2lo, src2hi);

    if (ctx->opcode & 0x00200000) {
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2lo, src2);
        tcg_gen_shri_i32(src2, src2, 8);
        tcg_gen_ext8u_i32(src2hi, src2);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2lo, src2lo, src1);
        tcg_gen_setcond_i32(TCG_COND_LEU, src2hi, src1, src2hi);
        tcg_gen_and_i32(src2lo, src2lo, src2hi);
        tcg_gen_or_i32(crf, crf, src2lo);
    }
    tcg_gen_shli_i32(crf, crf, CRF_GT_BIT);
    tcg_temp_free_i32(src1);
    tcg_temp_free_i32(src2);
    tcg_temp_free_i32(src2lo);
    tcg_temp_free_i32(src2hi);
}

#if defined(TARGET_PPC64)
/* cmpeqb */
static void gen_cmpeqb(DisasContext *ctx)
{
    gen_helper_cmpeqb(cpu_crf[crfD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                      cpu_gpr[rB(ctx->opcode)]);
}
#endif

/* isel (PowerPC 2.03 specification) */
static void gen_isel(DisasContext *ctx)
{
    uint32_t bi = rC(ctx->opcode);
    uint32_t mask = 0x08 >> (bi & 0x03);
    TCGv t0 = tcg_temp_new();
    TCGv zr;

    tcg_gen_extu_i32_tl(t0, cpu_crf[bi >> 2]);
    tcg_gen_andi_tl(t0, t0, mask);

    zr = tcg_const_tl(0);
    tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rD(ctx->opcode)], t0, zr,
                       rA(ctx->opcode) ? cpu_gpr[rA(ctx->opcode)] : zr,
                       cpu_gpr[rB(ctx->opcode)]);
    tcg_temp_free(zr);
    tcg_temp_free(t0);
}

/* cmpb: PowerPC 2.05 specification */
static void gen_cmpb(DisasContext *ctx)
{
    gen_helper_cmpb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)],
                    cpu_gpr[rB(ctx->opcode)]);
}

/*** Integer arithmetic ***/

static inline void gen_op_arith_compute_ov(DisasContext *ctx, TCGv arg0,
                                           TCGv arg1, TCGv arg2, int sub)
{
    TCGv t0 = tcg_temp_new();

    tcg_gen_xor_tl(cpu_ov, arg0, arg2);
    tcg_gen_xor_tl(t0, arg1, arg2);
    if (sub) {
        tcg_gen_and_tl(cpu_ov, cpu_ov, t0);
    } else {
        tcg_gen_andc_tl(cpu_ov, cpu_ov, t0);
    }
    tcg_temp_free(t0);
    if (NARROW_MODE(ctx)) {
        tcg_gen_extract_tl(cpu_ov, cpu_ov, 31, 1);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, cpu_ov);
        }
    } else {
        if (is_isa300(ctx)) {
            tcg_gen_extract_tl(cpu_ov32, cpu_ov, 31, 1);
        }
        tcg_gen_extract_tl(cpu_ov, cpu_ov, TARGET_LONG_BITS - 1, 1);
    }
    tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
}

static inline void gen_op_arith_compute_ca32(DisasContext *ctx,
                                             TCGv res, TCGv arg0, TCGv arg1,
                                             TCGv ca32, int sub)
{
    TCGv t0;

    if (!is_isa300(ctx)) {
        return;
    }

    t0 = tcg_temp_new();
    if (sub) {
        tcg_gen_eqv_tl(t0, arg0, arg1);
    } else {
        tcg_gen_xor_tl(t0, arg0, arg1);
    }
    tcg_gen_xor_tl(t0, t0, res);
    tcg_gen_extract_tl(ca32, t0, 32, 1);
    tcg_temp_free(t0);
}

/* Common add function */
static inline void gen_op_arith_add(DisasContext *ctx, TCGv ret, TCGv arg1,
                                    TCGv arg2, TCGv ca, TCGv ca32,
                                    bool add_ca, bool compute_ca,
                                    bool compute_ov, bool compute_rc0)
{
    TCGv t0 = ret;

    if (compute_ca || compute_ov) {
        t0 = tcg_temp_new();
    }

    if (compute_ca) {
        if (NARROW_MODE(ctx)) {
            /*
             * Caution: a non-obvious corner case of the spec is that
             * we must produce the *entire* 64-bit addition, but
             * produce the carry into bit 32.
             */
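            /*
             * Worked example (upper input halves clear): 0xffffffff +
             * 0x00000001 gives t0 = 0x1_00000000, whose bit 32 differs
             * from the carry-less XOR in t1, so the CA value extracted
             * below is 1 even though the visible 32-bit result is 0.
             */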
            TCGv t1 = tcg_temp_new();
            tcg_gen_xor_tl(t1, arg1, arg2);        /* add without carry */
            tcg_gen_add_tl(t0, arg1, arg2);
            if (add_ca) {
                tcg_gen_add_tl(t0, t0, ca);
            }
            tcg_gen_xor_tl(ca, t0, t1);            /* bits changed w/ carry */
            tcg_temp_free(t1);
            tcg_gen_extract_tl(ca, ca, 32, 1);
            if (is_isa300(ctx)) {
                tcg_gen_mov_tl(ca32, ca);
            }
        } else {
            TCGv zero = tcg_const_tl(0);
            if (add_ca) {
                tcg_gen_add2_tl(t0, ca, arg1, zero, ca, zero);
                tcg_gen_add2_tl(t0, ca, t0, ca, arg2, zero);
            } else {
                tcg_gen_add2_tl(t0, ca, arg1, zero, arg2, zero);
            }
            gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, ca32, 0);
            tcg_temp_free(zero);
        }
    } else {
        tcg_gen_add_tl(t0, arg1, arg2);
        if (add_ca) {
            tcg_gen_add_tl(t0, t0, ca);
        }
    }

    if (compute_ov) {
        gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 0);
    }
    if (unlikely(compute_rc0)) {
        gen_set_Rc0(ctx, t0);
    }

    if (t0 != ret) {
        tcg_gen_mov_tl(ret, t0);
        tcg_temp_free(t0);
    }
}
/* Add functions with two operands */
#define GEN_INT_ARITH_ADD(name, opc3, ca, add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
                     cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                     ca, glue(ca, 32), \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
}
/* Add functions with one operand and one immediate */
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val, ca, \
                                add_ca, compute_ca, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    TCGv t0 = tcg_const_tl(const_val); \
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], \
                     cpu_gpr[rA(ctx->opcode)], t0, \
                     ca, glue(ca, 32), \
                     add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \
    tcg_temp_free(t0); \
}

/* add add. addo addo. */
GEN_INT_ARITH_ADD(add, 0x08, cpu_ca, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, cpu_ca, 0, 0, 1)
/* addc addc. addco addco. */
GEN_INT_ARITH_ADD(addc, 0x00, cpu_ca, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, cpu_ca, 0, 1, 1)
/* adde adde. addeo addeo. */
GEN_INT_ARITH_ADD(adde, 0x04, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, cpu_ca, 1, 1, 1)
/* addme addme. addmeo addmeo. */
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, cpu_ca, 1, 1, 1)
/* addex */
GEN_INT_ARITH_ADD(addex, 0x05, cpu_ov, 1, 1, 0);
/* addze addze. addzeo addzeo. */
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, cpu_ca, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, cpu_ca, 1, 1, 1)
/* addic addic. */
static inline void gen_op_addic(DisasContext *ctx, bool compute_rc0)
{
    TCGv c = tcg_const_tl(SIMM(ctx->opcode));
    gen_op_arith_add(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)],
                     c, cpu_ca, cpu_ca32, 0, 1, 0, compute_rc0);
    tcg_temp_free(c);
}

static void gen_addic(DisasContext *ctx)
{
    gen_op_addic(ctx, 0);
}

static void gen_addic_(DisasContext *ctx)
{
    gen_op_addic(ctx, 1);
}

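/*
 * The TCG division ops leave division by zero (and INT_MIN / -1 for the
 * signed case) undefined, so the helpers below first flag those cases in
 * t2 and patch the divisor to a safe value; t2 then also feeds OV when
 * overflow tracking is requested.
 */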
static inline void gen_op_arith_divw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();
    TCGv_i32 t2 = tcg_temp_new_i32();
    TCGv_i32 t3 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    } else {
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
    }
    if (compute_ov) {
        tcg_gen_extu_i32_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_extu_i32_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}
/* Div functions */
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_divw(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign, compute_ov); \
}
/* divwu divwu. divwuo divwuo. */
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1);
/* divw divw. divwo divwo. */
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0);
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1);

/* div[wd]eu[o][.] */
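/*
 * The extended divide family (divweu/divwe and the 64-bit variants) is
 * implemented with out-of-line helpers, presumably because the widened
 * dividend and the overflow conditions do not map neatly onto single TCG
 * ops; only the CR0 update is done inline here.
 */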
#define GEN_DIVE(name, hlpr, compute_ov) \
static void gen_##name(DisasContext *ctx) \
{ \
    TCGv_i32 t0 = tcg_const_i32(compute_ov); \
    gen_helper_##hlpr(cpu_gpr[rD(ctx->opcode)], cpu_env, \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); \
    tcg_temp_free_i32(t0); \
    if (unlikely(Rc(ctx->opcode) != 0)) { \
        gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); \
    } \
}

GEN_DIVE(divweu, divweu, 0);
GEN_DIVE(divweuo, divweu, 1);
GEN_DIVE(divwe, divwe, 0);
GEN_DIVE(divweo, divwe, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_divd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign, int compute_ov)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();
    TCGv_i64 t2 = tcg_temp_new_i64();
    TCGv_i64 t3 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_div_i64(ret, t0, t1);
    } else {
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t1, 0);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_divu_i64(ret, t0, t1);
    }
    if (compute_ov) {
        tcg_gen_mov_tl(cpu_ov, t2);
        if (is_isa300(ctx)) {
            tcg_gen_mov_tl(cpu_ov32, t2);
        }
        tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t2);
    tcg_temp_free_i64(t3);

    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, ret);
    }
}

#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_divd(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign, compute_ov); \
}
/* divdu divdu. divduo divduo. */
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0);
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1);
/* divd divd. divdo divdo. */
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0);
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1);

GEN_DIVE(divdeu, divdeu, 0);
GEN_DIVE(divdeuo, divdeu, 1);
GEN_DIVE(divde, divde, 0);
GEN_DIVE(divdeo, divde, 1);
#endif

static inline void gen_op_arith_modw(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 t1 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg1);
    tcg_gen_trunc_tl_i32(t1, arg2);
    if (sign) {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_i32 t3 = tcg_temp_new_i32();
        tcg_gen_setcondi_i32(TCG_COND_EQ, t2, t0, INT_MIN);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i32(t2, t2, t3);
        tcg_gen_setcondi_i32(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i32(t2, t2, t3);
        tcg_gen_movi_i32(t3, 0);
        tcg_gen_movcond_i32(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i32(t3, t0, t1);
        tcg_gen_ext_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    } else {
        TCGv_i32 t2 = tcg_const_i32(1);
        TCGv_i32 t3 = tcg_const_i32(0);
        tcg_gen_movcond_i32(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i32(t3, t0, t1);
        tcg_gen_extu_i32_tl(ret, t3);
        tcg_temp_free_i32(t2);
        tcg_temp_free_i32(t3);
    }
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(t1);
}

#define GEN_INT_ARITH_MODW(name, opc3, sign) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_modw(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign); \
}

GEN_INT_ARITH_MODW(moduw, 0x08, 0);
GEN_INT_ARITH_MODW(modsw, 0x18, 1);

#if defined(TARGET_PPC64)
static inline void gen_op_arith_modd(DisasContext *ctx, TCGv ret, TCGv arg1,
                                     TCGv arg2, int sign)
{
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_mov_i64(t0, arg1);
    tcg_gen_mov_i64(t1, arg2);
    if (sign) {
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();
        tcg_gen_setcondi_i64(TCG_COND_EQ, t2, t0, INT64_MIN);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, -1);
        tcg_gen_and_i64(t2, t2, t3);
        tcg_gen_setcondi_i64(TCG_COND_EQ, t3, t1, 0);
        tcg_gen_or_i64(t2, t2, t3);
        tcg_gen_movi_i64(t3, 0);
        tcg_gen_movcond_i64(TCG_COND_NE, t1, t2, t3, t2, t1);
        tcg_gen_rem_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    } else {
        TCGv_i64 t2 = tcg_const_i64(1);
        TCGv_i64 t3 = tcg_const_i64(0);
        tcg_gen_movcond_i64(TCG_COND_EQ, t1, t1, t3, t2, t1);
        tcg_gen_remu_i64(ret, t0, t1);
        tcg_temp_free_i64(t2);
        tcg_temp_free_i64(t3);
    }
    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
}

#define GEN_INT_ARITH_MODD(name, opc3, sign) \
static void glue(gen_, name)(DisasContext *ctx) \
{ \
    gen_op_arith_modd(ctx, cpu_gpr[rD(ctx->opcode)], \
                      cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \
                      sign); \
}

GEN_INT_ARITH_MODD(modud, 0x08, 0);
GEN_INT_ARITH_MODD(modsd, 0x18, 1);
#endif

/* mulhw mulhw. */
*/ 1986 static void gen_mulhw(DisasContext *ctx) 1987 { 1988 TCGv_i32 t0 = tcg_temp_new_i32(); 1989 TCGv_i32 t1 = tcg_temp_new_i32(); 1990 1991 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 1992 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 1993 tcg_gen_muls2_i32(t0, t1, t0, t1); 1994 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 1995 tcg_temp_free_i32(t0); 1996 tcg_temp_free_i32(t1); 1997 if (unlikely(Rc(ctx->opcode) != 0)) { 1998 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 1999 } 2000 } 2001 2002 /* mulhwu mulhwu. */ 2003 static void gen_mulhwu(DisasContext *ctx) 2004 { 2005 TCGv_i32 t0 = tcg_temp_new_i32(); 2006 TCGv_i32 t1 = tcg_temp_new_i32(); 2007 2008 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2009 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2010 tcg_gen_mulu2_i32(t0, t1, t0, t1); 2011 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t1); 2012 tcg_temp_free_i32(t0); 2013 tcg_temp_free_i32(t1); 2014 if (unlikely(Rc(ctx->opcode) != 0)) { 2015 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2016 } 2017 } 2018 2019 /* mullw mullw. */ 2020 static void gen_mullw(DisasContext *ctx) 2021 { 2022 #if defined(TARGET_PPC64) 2023 TCGv_i64 t0, t1; 2024 t0 = tcg_temp_new_i64(); 2025 t1 = tcg_temp_new_i64(); 2026 tcg_gen_ext32s_tl(t0, cpu_gpr[rA(ctx->opcode)]); 2027 tcg_gen_ext32s_tl(t1, cpu_gpr[rB(ctx->opcode)]); 2028 tcg_gen_mul_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2029 tcg_temp_free(t0); 2030 tcg_temp_free(t1); 2031 #else 2032 tcg_gen_mul_i32(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2033 cpu_gpr[rB(ctx->opcode)]); 2034 #endif 2035 if (unlikely(Rc(ctx->opcode) != 0)) { 2036 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2037 } 2038 } 2039 2040 /* mullwo mullwo. */ 2041 static void gen_mullwo(DisasContext *ctx) 2042 { 2043 TCGv_i32 t0 = tcg_temp_new_i32(); 2044 TCGv_i32 t1 = tcg_temp_new_i32(); 2045 2046 tcg_gen_trunc_tl_i32(t0, cpu_gpr[rA(ctx->opcode)]); 2047 tcg_gen_trunc_tl_i32(t1, cpu_gpr[rB(ctx->opcode)]); 2048 tcg_gen_muls2_i32(t0, t1, t0, t1); 2049 #if defined(TARGET_PPC64) 2050 tcg_gen_concat_i32_i64(cpu_gpr[rD(ctx->opcode)], t0, t1); 2051 #else 2052 tcg_gen_mov_i32(cpu_gpr[rD(ctx->opcode)], t0); 2053 #endif 2054 2055 tcg_gen_sari_i32(t0, t0, 31); 2056 tcg_gen_setcond_i32(TCG_COND_NE, t0, t0, t1); 2057 tcg_gen_extu_i32_tl(cpu_ov, t0); 2058 if (is_isa300(ctx)) { 2059 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2060 } 2061 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2062 2063 tcg_temp_free_i32(t0); 2064 tcg_temp_free_i32(t1); 2065 if (unlikely(Rc(ctx->opcode) != 0)) { 2066 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2067 } 2068 } 2069 2070 /* mulli */ 2071 static void gen_mulli(DisasContext *ctx) 2072 { 2073 tcg_gen_muli_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2074 SIMM(ctx->opcode)); 2075 } 2076 2077 #if defined(TARGET_PPC64) 2078 /* mulhd mulhd. */ 2079 static void gen_mulhd(DisasContext *ctx) 2080 { 2081 TCGv lo = tcg_temp_new(); 2082 tcg_gen_muls2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2083 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2084 tcg_temp_free(lo); 2085 if (unlikely(Rc(ctx->opcode) != 0)) { 2086 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2087 } 2088 } 2089 2090 /* mulhdu mulhdu. */ 2091 static void gen_mulhdu(DisasContext *ctx) 2092 { 2093 TCGv lo = tcg_temp_new(); 2094 tcg_gen_mulu2_tl(lo, cpu_gpr[rD(ctx->opcode)], 2095 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2096 tcg_temp_free(lo); 2097 if (unlikely(Rc(ctx->opcode) != 0)) { 2098 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2099 } 2100 } 2101 2102 /* mulld mulld. 
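 * mulldo below computes the full 128-bit product with tcg_gen_muls2_i64 and
 * flags overflow when the high half is not the sign extension of the low half.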
*/ 2103 static void gen_mulld(DisasContext *ctx) 2104 { 2105 tcg_gen_mul_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2106 cpu_gpr[rB(ctx->opcode)]); 2107 if (unlikely(Rc(ctx->opcode) != 0)) { 2108 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2109 } 2110 } 2111 2112 /* mulldo mulldo. */ 2113 static void gen_mulldo(DisasContext *ctx) 2114 { 2115 TCGv_i64 t0 = tcg_temp_new_i64(); 2116 TCGv_i64 t1 = tcg_temp_new_i64(); 2117 2118 tcg_gen_muls2_i64(t0, t1, cpu_gpr[rA(ctx->opcode)], 2119 cpu_gpr[rB(ctx->opcode)]); 2120 tcg_gen_mov_i64(cpu_gpr[rD(ctx->opcode)], t0); 2121 2122 tcg_gen_sari_i64(t0, t0, 63); 2123 tcg_gen_setcond_i64(TCG_COND_NE, cpu_ov, t0, t1); 2124 if (is_isa300(ctx)) { 2125 tcg_gen_mov_tl(cpu_ov32, cpu_ov); 2126 } 2127 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 2128 2129 tcg_temp_free_i64(t0); 2130 tcg_temp_free_i64(t1); 2131 2132 if (unlikely(Rc(ctx->opcode) != 0)) { 2133 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2134 } 2135 } 2136 #endif 2137 2138 /* Common subf function */ 2139 static inline void gen_op_arith_subf(DisasContext *ctx, TCGv ret, TCGv arg1, 2140 TCGv arg2, bool add_ca, bool compute_ca, 2141 bool compute_ov, bool compute_rc0) 2142 { 2143 TCGv t0 = ret; 2144 2145 if (compute_ca || compute_ov) { 2146 t0 = tcg_temp_new(); 2147 } 2148 2149 if (compute_ca) { 2150 /* dest = ~arg1 + arg2 [+ ca]. */ 2151 if (NARROW_MODE(ctx)) { 2152 /* 2153 * Caution: a non-obvious corner case of the spec is that 2154 * we must produce the *entire* 64-bit addition, but 2155 * produce the carry into bit 32. 2156 */ 2157 TCGv inv1 = tcg_temp_new(); 2158 TCGv t1 = tcg_temp_new(); 2159 tcg_gen_not_tl(inv1, arg1); 2160 if (add_ca) { 2161 tcg_gen_add_tl(t0, arg2, cpu_ca); 2162 } else { 2163 tcg_gen_addi_tl(t0, arg2, 1); 2164 } 2165 tcg_gen_xor_tl(t1, arg2, inv1); /* add without carry */ 2166 tcg_gen_add_tl(t0, t0, inv1); 2167 tcg_temp_free(inv1); 2168 tcg_gen_xor_tl(cpu_ca, t0, t1); /* bits changes w/ carry */ 2169 tcg_temp_free(t1); 2170 tcg_gen_extract_tl(cpu_ca, cpu_ca, 32, 1); 2171 if (is_isa300(ctx)) { 2172 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 2173 } 2174 } else if (add_ca) { 2175 TCGv zero, inv1 = tcg_temp_new(); 2176 tcg_gen_not_tl(inv1, arg1); 2177 zero = tcg_const_tl(0); 2178 tcg_gen_add2_tl(t0, cpu_ca, arg2, zero, cpu_ca, zero); 2179 tcg_gen_add2_tl(t0, cpu_ca, t0, cpu_ca, inv1, zero); 2180 gen_op_arith_compute_ca32(ctx, t0, inv1, arg2, cpu_ca32, 0); 2181 tcg_temp_free(zero); 2182 tcg_temp_free(inv1); 2183 } else { 2184 tcg_gen_setcond_tl(TCG_COND_GEU, cpu_ca, arg2, arg1); 2185 tcg_gen_sub_tl(t0, arg2, arg1); 2186 gen_op_arith_compute_ca32(ctx, t0, arg1, arg2, cpu_ca32, 1); 2187 } 2188 } else if (add_ca) { 2189 /* 2190 * Since we're ignoring carry-out, we can simplify the 2191 * standard ~arg1 + arg2 + ca to arg2 - arg1 + ca - 1. 
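         * (In two's complement ~arg1 = -arg1 - 1, so
         * ~arg1 + arg2 + ca = arg2 - arg1 - 1 + ca.)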
2192 */ 2193 tcg_gen_sub_tl(t0, arg2, arg1); 2194 tcg_gen_add_tl(t0, t0, cpu_ca); 2195 tcg_gen_subi_tl(t0, t0, 1); 2196 } else { 2197 tcg_gen_sub_tl(t0, arg2, arg1); 2198 } 2199 2200 if (compute_ov) { 2201 gen_op_arith_compute_ov(ctx, t0, arg1, arg2, 1); 2202 } 2203 if (unlikely(compute_rc0)) { 2204 gen_set_Rc0(ctx, t0); 2205 } 2206 2207 if (t0 != ret) { 2208 tcg_gen_mov_tl(ret, t0); 2209 tcg_temp_free(t0); 2210 } 2211 } 2212 /* Sub functions with Two operands functions */ 2213 #define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov) \ 2214 static void glue(gen_, name)(DisasContext *ctx) \ 2215 { \ 2216 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2217 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], \ 2218 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2219 } 2220 /* Sub functions with one operand and one immediate */ 2221 #define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val, \ 2222 add_ca, compute_ca, compute_ov) \ 2223 static void glue(gen_, name)(DisasContext *ctx) \ 2224 { \ 2225 TCGv t0 = tcg_const_tl(const_val); \ 2226 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], \ 2227 cpu_gpr[rA(ctx->opcode)], t0, \ 2228 add_ca, compute_ca, compute_ov, Rc(ctx->opcode)); \ 2229 tcg_temp_free(t0); \ 2230 } 2231 /* subf subf. subfo subfo. */ 2232 GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0) 2233 GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1) 2234 /* subfc subfc. subfco subfco. */ 2235 GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0) 2236 GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1) 2237 /* subfe subfe. subfeo subfo. */ 2238 GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0) 2239 GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1) 2240 /* subfme subfme. subfmeo subfmeo. */ 2241 GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0) 2242 GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1) 2243 /* subfze subfze. subfzeo subfzeo.*/ 2244 GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0) 2245 GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1) 2246 2247 /* subfic */ 2248 static void gen_subfic(DisasContext *ctx) 2249 { 2250 TCGv c = tcg_const_tl(SIMM(ctx->opcode)); 2251 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2252 c, 0, 1, 0, 0); 2253 tcg_temp_free(c); 2254 } 2255 2256 /* neg neg. nego nego. */ 2257 static inline void gen_op_arith_neg(DisasContext *ctx, bool compute_ov) 2258 { 2259 TCGv zero = tcg_const_tl(0); 2260 gen_op_arith_subf(ctx, cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 2261 zero, 0, 0, compute_ov, Rc(ctx->opcode)); 2262 tcg_temp_free(zero); 2263 } 2264 2265 static void gen_neg(DisasContext *ctx) 2266 { 2267 tcg_gen_neg_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2268 if (unlikely(Rc(ctx->opcode))) { 2269 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 2270 } 2271 } 2272 2273 static void gen_nego(DisasContext *ctx) 2274 { 2275 gen_op_arith_neg(ctx, 1); 2276 } 2277 2278 /*** Integer logical ***/ 2279 #define GEN_LOGICAL2(name, tcg_op, opc, type) \ 2280 static void glue(gen_, name)(DisasContext *ctx) \ 2281 { \ 2282 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], \ 2283 cpu_gpr[rB(ctx->opcode)]); \ 2284 if (unlikely(Rc(ctx->opcode) != 0)) \ 2285 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2286 } 2287 2288 #define GEN_LOGICAL1(name, tcg_op, opc, type) \ 2289 static void glue(gen_, name)(DisasContext *ctx) \ 2290 { \ 2291 tcg_op(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); \ 2292 if (unlikely(Rc(ctx->opcode) != 0)) \ 2293 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); \ 2294 } 2295 2296 /* and & and. 
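 * The GEN_LOGICAL2/GEN_LOGICAL1 macros above expand to the two-operand and
 * one-operand logical handlers: apply the given TCG op to rS (and rB) into
 * rA, then update CR0 for the Rc=1 ("dot") forms.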
*/ 2297 GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER); 2298 /* andc & andc. */ 2299 GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER); 2300 2301 /* andi. */ 2302 static void gen_andi_(DisasContext *ctx) 2303 { 2304 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2305 UIMM(ctx->opcode)); 2306 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2307 } 2308 2309 /* andis. */ 2310 static void gen_andis_(DisasContext *ctx) 2311 { 2312 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2313 UIMM(ctx->opcode) << 16); 2314 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2315 } 2316 2317 /* cntlzw */ 2318 static void gen_cntlzw(DisasContext *ctx) 2319 { 2320 TCGv_i32 t = tcg_temp_new_i32(); 2321 2322 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2323 tcg_gen_clzi_i32(t, t, 32); 2324 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2325 tcg_temp_free_i32(t); 2326 2327 if (unlikely(Rc(ctx->opcode) != 0)) { 2328 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2329 } 2330 } 2331 2332 /* cnttzw */ 2333 static void gen_cnttzw(DisasContext *ctx) 2334 { 2335 TCGv_i32 t = tcg_temp_new_i32(); 2336 2337 tcg_gen_trunc_tl_i32(t, cpu_gpr[rS(ctx->opcode)]); 2338 tcg_gen_ctzi_i32(t, t, 32); 2339 tcg_gen_extu_i32_tl(cpu_gpr[rA(ctx->opcode)], t); 2340 tcg_temp_free_i32(t); 2341 2342 if (unlikely(Rc(ctx->opcode) != 0)) { 2343 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2344 } 2345 } 2346 2347 /* eqv & eqv. */ 2348 GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER); 2349 /* extsb & extsb. */ 2350 GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER); 2351 /* extsh & extsh. */ 2352 GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER); 2353 /* nand & nand. */ 2354 GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER); 2355 /* nor & nor. */ 2356 GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER); 2357 2358 #if defined(TARGET_PPC64) && !defined(CONFIG_USER_ONLY) 2359 static void gen_pause(DisasContext *ctx) 2360 { 2361 TCGv_i32 t0 = tcg_const_i32(0); 2362 tcg_gen_st_i32(t0, cpu_env, 2363 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 2364 tcg_temp_free_i32(t0); 2365 2366 /* Stop translation, this gives other CPUs a chance to run */ 2367 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 2368 } 2369 #endif /* defined(TARGET_PPC64) */ 2370 2371 /* or & or. */ 2372 static void gen_or(DisasContext *ctx) 2373 { 2374 int rs, ra, rb; 2375 2376 rs = rS(ctx->opcode); 2377 ra = rA(ctx->opcode); 2378 rb = rB(ctx->opcode); 2379 /* Optimisation for mr. 
ri case */
    if (rs != ra || rs != rb) {
        if (rs != rb) {
            tcg_gen_or_tl(cpu_gpr[ra], cpu_gpr[rs], cpu_gpr[rb]);
        } else {
            tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rs]);
        }
        if (unlikely(Rc(ctx->opcode) != 0)) {
            gen_set_Rc0(ctx, cpu_gpr[ra]);
        }
    } else if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rs]);
#if defined(TARGET_PPC64)
    } else if (rs != 0) { /* 0 is nop */
        int prio = 0;

        switch (rs) {
        case 1:
            /* Set process priority to low */
            prio = 2;
            break;
        case 6:
            /* Set process priority to medium-low */
            prio = 3;
            break;
        case 2:
            /* Set process priority to normal */
            prio = 4;
            break;
#if !defined(CONFIG_USER_ONLY)
        case 31:
            if (!ctx->pr) {
                /* Set process priority to very low */
                prio = 1;
            }
            break;
        case 5:
            if (!ctx->pr) {
                /* Set process priority to medium-high */
                prio = 5;
            }
            break;
        case 3:
            if (!ctx->pr) {
                /* Set process priority to high */
                prio = 6;
            }
            break;
        case 7:
            if (ctx->hv && !ctx->pr) {
                /* Set process priority to very high */
                prio = 7;
            }
            break;
#endif
        default:
            break;
        }
        if (prio) {
            TCGv t0 = tcg_temp_new();
            gen_load_spr(t0, SPR_PPR);
            tcg_gen_andi_tl(t0, t0, ~0x001C000000000000ULL);
            tcg_gen_ori_tl(t0, t0, ((uint64_t)prio) << 50);
            gen_store_spr(SPR_PPR, t0);
            tcg_temp_free(t0);
        }
#if !defined(CONFIG_USER_ONLY)
        /*
         * Pause out of TCG, otherwise spin loops with smt_low eat too
         * much CPU and the kernel hangs. This applies to all
         * encodings other than no-op, e.g., miso(rs=26), yield(27),
         * mdoio(29), mdoom(30), and all currently undefined.
         */
        gen_pause(ctx);
#endif
#endif
    }
}
/* orc & orc. */
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER);

/* xor & xor.
*/ 2461 static void gen_xor(DisasContext *ctx) 2462 { 2463 /* Optimisation for "set to zero" case */ 2464 if (rS(ctx->opcode) != rB(ctx->opcode)) { 2465 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2466 cpu_gpr[rB(ctx->opcode)]); 2467 } else { 2468 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 2469 } 2470 if (unlikely(Rc(ctx->opcode) != 0)) { 2471 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2472 } 2473 } 2474 2475 /* ori */ 2476 static void gen_ori(DisasContext *ctx) 2477 { 2478 target_ulong uimm = UIMM(ctx->opcode); 2479 2480 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2481 return; 2482 } 2483 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2484 } 2485 2486 /* oris */ 2487 static void gen_oris(DisasContext *ctx) 2488 { 2489 target_ulong uimm = UIMM(ctx->opcode); 2490 2491 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2492 /* NOP */ 2493 return; 2494 } 2495 tcg_gen_ori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2496 uimm << 16); 2497 } 2498 2499 /* xori */ 2500 static void gen_xori(DisasContext *ctx) 2501 { 2502 target_ulong uimm = UIMM(ctx->opcode); 2503 2504 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2505 /* NOP */ 2506 return; 2507 } 2508 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], uimm); 2509 } 2510 2511 /* xoris */ 2512 static void gen_xoris(DisasContext *ctx) 2513 { 2514 target_ulong uimm = UIMM(ctx->opcode); 2515 2516 if (rS(ctx->opcode) == rA(ctx->opcode) && uimm == 0) { 2517 /* NOP */ 2518 return; 2519 } 2520 tcg_gen_xori_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 2521 uimm << 16); 2522 } 2523 2524 /* popcntb : PowerPC 2.03 specification */ 2525 static void gen_popcntb(DisasContext *ctx) 2526 { 2527 gen_helper_popcntb(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2528 } 2529 2530 static void gen_popcntw(DisasContext *ctx) 2531 { 2532 #if defined(TARGET_PPC64) 2533 gen_helper_popcntw(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2534 #else 2535 tcg_gen_ctpop_i32(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2536 #endif 2537 } 2538 2539 #if defined(TARGET_PPC64) 2540 /* popcntd: PowerPC 2.06 specification */ 2541 static void gen_popcntd(DisasContext *ctx) 2542 { 2543 tcg_gen_ctpop_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 2544 } 2545 #endif 2546 2547 /* prtyw: PowerPC 2.05 specification */ 2548 static void gen_prtyw(DisasContext *ctx) 2549 { 2550 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2551 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2552 TCGv t0 = tcg_temp_new(); 2553 tcg_gen_shri_tl(t0, rs, 16); 2554 tcg_gen_xor_tl(ra, rs, t0); 2555 tcg_gen_shri_tl(t0, ra, 8); 2556 tcg_gen_xor_tl(ra, ra, t0); 2557 tcg_gen_andi_tl(ra, ra, (target_ulong)0x100000001ULL); 2558 tcg_temp_free(t0); 2559 } 2560 2561 #if defined(TARGET_PPC64) 2562 /* prtyd: PowerPC 2.05 specification */ 2563 static void gen_prtyd(DisasContext *ctx) 2564 { 2565 TCGv ra = cpu_gpr[rA(ctx->opcode)]; 2566 TCGv rs = cpu_gpr[rS(ctx->opcode)]; 2567 TCGv t0 = tcg_temp_new(); 2568 tcg_gen_shri_tl(t0, rs, 32); 2569 tcg_gen_xor_tl(ra, rs, t0); 2570 tcg_gen_shri_tl(t0, ra, 16); 2571 tcg_gen_xor_tl(ra, ra, t0); 2572 tcg_gen_shri_tl(t0, ra, 8); 2573 tcg_gen_xor_tl(ra, ra, t0); 2574 tcg_gen_andi_tl(ra, ra, 1); 2575 tcg_temp_free(t0); 2576 } 2577 #endif 2578 2579 #if defined(TARGET_PPC64) 2580 /* bpermd */ 2581 static void gen_bpermd(DisasContext *ctx) 2582 { 2583 gen_helper_bpermd(cpu_gpr[rA(ctx->opcode)], 2584 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2585 } 2586 #endif 2587 
2588 #if defined(TARGET_PPC64) 2589 /* extsw & extsw. */ 2590 GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B); 2591 2592 /* cntlzd */ 2593 static void gen_cntlzd(DisasContext *ctx) 2594 { 2595 tcg_gen_clzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2596 if (unlikely(Rc(ctx->opcode) != 0)) { 2597 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2598 } 2599 } 2600 2601 /* cnttzd */ 2602 static void gen_cnttzd(DisasContext *ctx) 2603 { 2604 tcg_gen_ctzi_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], 64); 2605 if (unlikely(Rc(ctx->opcode) != 0)) { 2606 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2607 } 2608 } 2609 2610 /* darn */ 2611 static void gen_darn(DisasContext *ctx) 2612 { 2613 int l = L(ctx->opcode); 2614 2615 if (l > 2) { 2616 tcg_gen_movi_i64(cpu_gpr[rD(ctx->opcode)], -1); 2617 } else { 2618 gen_icount_io_start(ctx); 2619 if (l == 0) { 2620 gen_helper_darn32(cpu_gpr[rD(ctx->opcode)]); 2621 } else { 2622 /* Return 64-bit random for both CRN and RRN */ 2623 gen_helper_darn64(cpu_gpr[rD(ctx->opcode)]); 2624 } 2625 } 2626 } 2627 #endif 2628 2629 /*** Integer rotate ***/ 2630 2631 /* rlwimi & rlwimi. */ 2632 static void gen_rlwimi(DisasContext *ctx) 2633 { 2634 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2635 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2636 uint32_t sh = SH(ctx->opcode); 2637 uint32_t mb = MB(ctx->opcode); 2638 uint32_t me = ME(ctx->opcode); 2639 2640 if (sh == (31 - me) && mb <= me) { 2641 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2642 } else { 2643 target_ulong mask; 2644 bool mask_in_32b = true; 2645 TCGv t1; 2646 2647 #if defined(TARGET_PPC64) 2648 mb += 32; 2649 me += 32; 2650 #endif 2651 mask = MASK(mb, me); 2652 2653 #if defined(TARGET_PPC64) 2654 if (mask > 0xffffffffu) { 2655 mask_in_32b = false; 2656 } 2657 #endif 2658 t1 = tcg_temp_new(); 2659 if (mask_in_32b) { 2660 TCGv_i32 t0 = tcg_temp_new_i32(); 2661 tcg_gen_trunc_tl_i32(t0, t_rs); 2662 tcg_gen_rotli_i32(t0, t0, sh); 2663 tcg_gen_extu_i32_tl(t1, t0); 2664 tcg_temp_free_i32(t0); 2665 } else { 2666 #if defined(TARGET_PPC64) 2667 tcg_gen_deposit_i64(t1, t_rs, t_rs, 32, 32); 2668 tcg_gen_rotli_i64(t1, t1, sh); 2669 #else 2670 g_assert_not_reached(); 2671 #endif 2672 } 2673 2674 tcg_gen_andi_tl(t1, t1, mask); 2675 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2676 tcg_gen_or_tl(t_ra, t_ra, t1); 2677 tcg_temp_free(t1); 2678 } 2679 if (unlikely(Rc(ctx->opcode) != 0)) { 2680 gen_set_Rc0(ctx, t_ra); 2681 } 2682 } 2683 2684 /* rlwinm & rlwinm. 
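 * Common extended mnemonics map directly onto the two fast paths below, e.g.
 *   slwi rA,rS,8  ==  rlwinm rA,rS,8,0,23   (deposit_z case)
 *   srwi rA,rS,8  ==  rlwinm rA,rS,24,8,31  (extract case)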
*/ 2685 static void gen_rlwinm(DisasContext *ctx) 2686 { 2687 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2688 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2689 int sh = SH(ctx->opcode); 2690 int mb = MB(ctx->opcode); 2691 int me = ME(ctx->opcode); 2692 int len = me - mb + 1; 2693 int rsh = (32 - sh) & 31; 2694 2695 if (sh != 0 && len > 0 && me == (31 - sh)) { 2696 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2697 } else if (me == 31 && rsh + len <= 32) { 2698 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2699 } else { 2700 target_ulong mask; 2701 bool mask_in_32b = true; 2702 #if defined(TARGET_PPC64) 2703 mb += 32; 2704 me += 32; 2705 #endif 2706 mask = MASK(mb, me); 2707 #if defined(TARGET_PPC64) 2708 if (mask > 0xffffffffu) { 2709 mask_in_32b = false; 2710 } 2711 #endif 2712 if (mask_in_32b) { 2713 if (sh == 0) { 2714 tcg_gen_andi_tl(t_ra, t_rs, mask); 2715 } else { 2716 TCGv_i32 t0 = tcg_temp_new_i32(); 2717 tcg_gen_trunc_tl_i32(t0, t_rs); 2718 tcg_gen_rotli_i32(t0, t0, sh); 2719 tcg_gen_andi_i32(t0, t0, mask); 2720 tcg_gen_extu_i32_tl(t_ra, t0); 2721 tcg_temp_free_i32(t0); 2722 } 2723 } else { 2724 #if defined(TARGET_PPC64) 2725 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2726 tcg_gen_rotli_i64(t_ra, t_ra, sh); 2727 tcg_gen_andi_i64(t_ra, t_ra, mask); 2728 #else 2729 g_assert_not_reached(); 2730 #endif 2731 } 2732 } 2733 if (unlikely(Rc(ctx->opcode) != 0)) { 2734 gen_set_Rc0(ctx, t_ra); 2735 } 2736 } 2737 2738 /* rlwnm & rlwnm. */ 2739 static void gen_rlwnm(DisasContext *ctx) 2740 { 2741 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2742 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2743 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2744 uint32_t mb = MB(ctx->opcode); 2745 uint32_t me = ME(ctx->opcode); 2746 target_ulong mask; 2747 bool mask_in_32b = true; 2748 2749 #if defined(TARGET_PPC64) 2750 mb += 32; 2751 me += 32; 2752 #endif 2753 mask = MASK(mb, me); 2754 2755 #if defined(TARGET_PPC64) 2756 if (mask > 0xffffffffu) { 2757 mask_in_32b = false; 2758 } 2759 #endif 2760 if (mask_in_32b) { 2761 TCGv_i32 t0 = tcg_temp_new_i32(); 2762 TCGv_i32 t1 = tcg_temp_new_i32(); 2763 tcg_gen_trunc_tl_i32(t0, t_rb); 2764 tcg_gen_trunc_tl_i32(t1, t_rs); 2765 tcg_gen_andi_i32(t0, t0, 0x1f); 2766 tcg_gen_rotl_i32(t1, t1, t0); 2767 tcg_gen_extu_i32_tl(t_ra, t1); 2768 tcg_temp_free_i32(t0); 2769 tcg_temp_free_i32(t1); 2770 } else { 2771 #if defined(TARGET_PPC64) 2772 TCGv_i64 t0 = tcg_temp_new_i64(); 2773 tcg_gen_andi_i64(t0, t_rb, 0x1f); 2774 tcg_gen_deposit_i64(t_ra, t_rs, t_rs, 32, 32); 2775 tcg_gen_rotl_i64(t_ra, t_ra, t0); 2776 tcg_temp_free_i64(t0); 2777 #else 2778 g_assert_not_reached(); 2779 #endif 2780 } 2781 2782 tcg_gen_andi_tl(t_ra, t_ra, mask); 2783 2784 if (unlikely(Rc(ctx->opcode) != 0)) { 2785 gen_set_Rc0(ctx, t_ra); 2786 } 2787 } 2788 2789 #if defined(TARGET_PPC64) 2790 #define GEN_PPC64_R2(name, opc1, opc2) \ 2791 static void glue(gen_, name##0)(DisasContext *ctx) \ 2792 { \ 2793 gen_##name(ctx, 0); \ 2794 } \ 2795 \ 2796 static void glue(gen_, name##1)(DisasContext *ctx) \ 2797 { \ 2798 gen_##name(ctx, 1); \ 2799 } 2800 #define GEN_PPC64_R4(name, opc1, opc2) \ 2801 static void glue(gen_, name##0)(DisasContext *ctx) \ 2802 { \ 2803 gen_##name(ctx, 0, 0); \ 2804 } \ 2805 \ 2806 static void glue(gen_, name##1)(DisasContext *ctx) \ 2807 { \ 2808 gen_##name(ctx, 0, 1); \ 2809 } \ 2810 \ 2811 static void glue(gen_, name##2)(DisasContext *ctx) \ 2812 { \ 2813 gen_##name(ctx, 1, 0); \ 2814 } \ 2815 \ 2816 static void glue(gen_, name##3)(DisasContext *ctx) \ 2817 { \ 2818 gen_##name(ctx, 1, 1); \ 2819 } 2820 2821 static void 
gen_rldinm(DisasContext *ctx, int mb, int me, int sh) 2822 { 2823 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2824 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2825 int len = me - mb + 1; 2826 int rsh = (64 - sh) & 63; 2827 2828 if (sh != 0 && len > 0 && me == (63 - sh)) { 2829 tcg_gen_deposit_z_tl(t_ra, t_rs, sh, len); 2830 } else if (me == 63 && rsh + len <= 64) { 2831 tcg_gen_extract_tl(t_ra, t_rs, rsh, len); 2832 } else { 2833 tcg_gen_rotli_tl(t_ra, t_rs, sh); 2834 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2835 } 2836 if (unlikely(Rc(ctx->opcode) != 0)) { 2837 gen_set_Rc0(ctx, t_ra); 2838 } 2839 } 2840 2841 /* rldicl - rldicl. */ 2842 static inline void gen_rldicl(DisasContext *ctx, int mbn, int shn) 2843 { 2844 uint32_t sh, mb; 2845 2846 sh = SH(ctx->opcode) | (shn << 5); 2847 mb = MB(ctx->opcode) | (mbn << 5); 2848 gen_rldinm(ctx, mb, 63, sh); 2849 } 2850 GEN_PPC64_R4(rldicl, 0x1E, 0x00); 2851 2852 /* rldicr - rldicr. */ 2853 static inline void gen_rldicr(DisasContext *ctx, int men, int shn) 2854 { 2855 uint32_t sh, me; 2856 2857 sh = SH(ctx->opcode) | (shn << 5); 2858 me = MB(ctx->opcode) | (men << 5); 2859 gen_rldinm(ctx, 0, me, sh); 2860 } 2861 GEN_PPC64_R4(rldicr, 0x1E, 0x02); 2862 2863 /* rldic - rldic. */ 2864 static inline void gen_rldic(DisasContext *ctx, int mbn, int shn) 2865 { 2866 uint32_t sh, mb; 2867 2868 sh = SH(ctx->opcode) | (shn << 5); 2869 mb = MB(ctx->opcode) | (mbn << 5); 2870 gen_rldinm(ctx, mb, 63 - sh, sh); 2871 } 2872 GEN_PPC64_R4(rldic, 0x1E, 0x04); 2873 2874 static void gen_rldnm(DisasContext *ctx, int mb, int me) 2875 { 2876 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2877 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2878 TCGv t_rb = cpu_gpr[rB(ctx->opcode)]; 2879 TCGv t0; 2880 2881 t0 = tcg_temp_new(); 2882 tcg_gen_andi_tl(t0, t_rb, 0x3f); 2883 tcg_gen_rotl_tl(t_ra, t_rs, t0); 2884 tcg_temp_free(t0); 2885 2886 tcg_gen_andi_tl(t_ra, t_ra, MASK(mb, me)); 2887 if (unlikely(Rc(ctx->opcode) != 0)) { 2888 gen_set_Rc0(ctx, t_ra); 2889 } 2890 } 2891 2892 /* rldcl - rldcl. */ 2893 static inline void gen_rldcl(DisasContext *ctx, int mbn) 2894 { 2895 uint32_t mb; 2896 2897 mb = MB(ctx->opcode) | (mbn << 5); 2898 gen_rldnm(ctx, mb, 63); 2899 } 2900 GEN_PPC64_R2(rldcl, 0x1E, 0x08); 2901 2902 /* rldcr - rldcr. */ 2903 static inline void gen_rldcr(DisasContext *ctx, int men) 2904 { 2905 uint32_t me; 2906 2907 me = MB(ctx->opcode) | (men << 5); 2908 gen_rldnm(ctx, 0, me); 2909 } 2910 GEN_PPC64_R2(rldcr, 0x1E, 0x09); 2911 2912 /* rldimi - rldimi. */ 2913 static void gen_rldimi(DisasContext *ctx, int mbn, int shn) 2914 { 2915 TCGv t_ra = cpu_gpr[rA(ctx->opcode)]; 2916 TCGv t_rs = cpu_gpr[rS(ctx->opcode)]; 2917 uint32_t sh = SH(ctx->opcode) | (shn << 5); 2918 uint32_t mb = MB(ctx->opcode) | (mbn << 5); 2919 uint32_t me = 63 - sh; 2920 2921 if (mb <= me) { 2922 tcg_gen_deposit_tl(t_ra, t_ra, t_rs, sh, me - mb + 1); 2923 } else { 2924 target_ulong mask = MASK(mb, me); 2925 TCGv t1 = tcg_temp_new(); 2926 2927 tcg_gen_rotli_tl(t1, t_rs, sh); 2928 tcg_gen_andi_tl(t1, t1, mask); 2929 tcg_gen_andi_tl(t_ra, t_ra, ~mask); 2930 tcg_gen_or_tl(t_ra, t_ra, t1); 2931 tcg_temp_free(t1); 2932 } 2933 if (unlikely(Rc(ctx->opcode) != 0)) { 2934 gen_set_Rc0(ctx, t_ra); 2935 } 2936 } 2937 GEN_PPC64_R4(rldimi, 0x1E, 0x06); 2938 #endif 2939 2940 /*** Integer shift ***/ 2941 2942 /* slw & slw. 
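 * The mask trick used below: bit 0x20 of rB is shifted up into the sign bit
 * and then smeared across the register with an arithmetic shift right, giving
 * all-ones exactly when the shift amount is >= 32; the andc then zeroes rS,
 * so the architected zero result falls out of the final 5-bit shift.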
*/ 2943 static void gen_slw(DisasContext *ctx) 2944 { 2945 TCGv t0, t1; 2946 2947 t0 = tcg_temp_new(); 2948 /* AND rS with a mask that is 0 when rB >= 0x20 */ 2949 #if defined(TARGET_PPC64) 2950 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 2951 tcg_gen_sari_tl(t0, t0, 0x3f); 2952 #else 2953 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 2954 tcg_gen_sari_tl(t0, t0, 0x1f); 2955 #endif 2956 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 2957 t1 = tcg_temp_new(); 2958 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 2959 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 2960 tcg_temp_free(t1); 2961 tcg_temp_free(t0); 2962 tcg_gen_ext32u_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 2963 if (unlikely(Rc(ctx->opcode) != 0)) { 2964 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2965 } 2966 } 2967 2968 /* sraw & sraw. */ 2969 static void gen_sraw(DisasContext *ctx) 2970 { 2971 gen_helper_sraw(cpu_gpr[rA(ctx->opcode)], cpu_env, 2972 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 2973 if (unlikely(Rc(ctx->opcode) != 0)) { 2974 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 2975 } 2976 } 2977 2978 /* srawi & srawi. */ 2979 static void gen_srawi(DisasContext *ctx) 2980 { 2981 int sh = SH(ctx->opcode); 2982 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 2983 TCGv src = cpu_gpr[rS(ctx->opcode)]; 2984 if (sh == 0) { 2985 tcg_gen_ext32s_tl(dst, src); 2986 tcg_gen_movi_tl(cpu_ca, 0); 2987 if (is_isa300(ctx)) { 2988 tcg_gen_movi_tl(cpu_ca32, 0); 2989 } 2990 } else { 2991 TCGv t0; 2992 tcg_gen_ext32s_tl(dst, src); 2993 tcg_gen_andi_tl(cpu_ca, dst, (1ULL << sh) - 1); 2994 t0 = tcg_temp_new(); 2995 tcg_gen_sari_tl(t0, dst, TARGET_LONG_BITS - 1); 2996 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 2997 tcg_temp_free(t0); 2998 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 2999 if (is_isa300(ctx)) { 3000 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3001 } 3002 tcg_gen_sari_tl(dst, dst, sh); 3003 } 3004 if (unlikely(Rc(ctx->opcode) != 0)) { 3005 gen_set_Rc0(ctx, dst); 3006 } 3007 } 3008 3009 /* srw & srw. */ 3010 static void gen_srw(DisasContext *ctx) 3011 { 3012 TCGv t0, t1; 3013 3014 t0 = tcg_temp_new(); 3015 /* AND rS with a mask that is 0 when rB >= 0x20 */ 3016 #if defined(TARGET_PPC64) 3017 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x3a); 3018 tcg_gen_sari_tl(t0, t0, 0x3f); 3019 #else 3020 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1a); 3021 tcg_gen_sari_tl(t0, t0, 0x1f); 3022 #endif 3023 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3024 tcg_gen_ext32u_tl(t0, t0); 3025 t1 = tcg_temp_new(); 3026 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1f); 3027 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3028 tcg_temp_free(t1); 3029 tcg_temp_free(t0); 3030 if (unlikely(Rc(ctx->opcode) != 0)) { 3031 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3032 } 3033 } 3034 3035 #if defined(TARGET_PPC64) 3036 /* sld & sld. */ 3037 static void gen_sld(DisasContext *ctx) 3038 { 3039 TCGv t0, t1; 3040 3041 t0 = tcg_temp_new(); 3042 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3043 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3044 tcg_gen_sari_tl(t0, t0, 0x3f); 3045 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3046 t1 = tcg_temp_new(); 3047 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3048 tcg_gen_shl_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3049 tcg_temp_free(t1); 3050 tcg_temp_free(t0); 3051 if (unlikely(Rc(ctx->opcode) != 0)) { 3052 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3053 } 3054 } 3055 3056 /* srad & srad. 
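 * sraw above and srad below use helpers because CA/CA32 must be set from the
 * bits shifted out of a negative source; srawi/sradi, with a constant shift
 * count, compute that carry inline instead.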
*/ 3057 static void gen_srad(DisasContext *ctx) 3058 { 3059 gen_helper_srad(cpu_gpr[rA(ctx->opcode)], cpu_env, 3060 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3061 if (unlikely(Rc(ctx->opcode) != 0)) { 3062 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3063 } 3064 } 3065 /* sradi & sradi. */ 3066 static inline void gen_sradi(DisasContext *ctx, int n) 3067 { 3068 int sh = SH(ctx->opcode) + (n << 5); 3069 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3070 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3071 if (sh == 0) { 3072 tcg_gen_mov_tl(dst, src); 3073 tcg_gen_movi_tl(cpu_ca, 0); 3074 if (is_isa300(ctx)) { 3075 tcg_gen_movi_tl(cpu_ca32, 0); 3076 } 3077 } else { 3078 TCGv t0; 3079 tcg_gen_andi_tl(cpu_ca, src, (1ULL << sh) - 1); 3080 t0 = tcg_temp_new(); 3081 tcg_gen_sari_tl(t0, src, TARGET_LONG_BITS - 1); 3082 tcg_gen_and_tl(cpu_ca, cpu_ca, t0); 3083 tcg_temp_free(t0); 3084 tcg_gen_setcondi_tl(TCG_COND_NE, cpu_ca, cpu_ca, 0); 3085 if (is_isa300(ctx)) { 3086 tcg_gen_mov_tl(cpu_ca32, cpu_ca); 3087 } 3088 tcg_gen_sari_tl(dst, src, sh); 3089 } 3090 if (unlikely(Rc(ctx->opcode) != 0)) { 3091 gen_set_Rc0(ctx, dst); 3092 } 3093 } 3094 3095 static void gen_sradi0(DisasContext *ctx) 3096 { 3097 gen_sradi(ctx, 0); 3098 } 3099 3100 static void gen_sradi1(DisasContext *ctx) 3101 { 3102 gen_sradi(ctx, 1); 3103 } 3104 3105 /* extswsli & extswsli. */ 3106 static inline void gen_extswsli(DisasContext *ctx, int n) 3107 { 3108 int sh = SH(ctx->opcode) + (n << 5); 3109 TCGv dst = cpu_gpr[rA(ctx->opcode)]; 3110 TCGv src = cpu_gpr[rS(ctx->opcode)]; 3111 3112 tcg_gen_ext32s_tl(dst, src); 3113 tcg_gen_shli_tl(dst, dst, sh); 3114 if (unlikely(Rc(ctx->opcode) != 0)) { 3115 gen_set_Rc0(ctx, dst); 3116 } 3117 } 3118 3119 static void gen_extswsli0(DisasContext *ctx) 3120 { 3121 gen_extswsli(ctx, 0); 3122 } 3123 3124 static void gen_extswsli1(DisasContext *ctx) 3125 { 3126 gen_extswsli(ctx, 1); 3127 } 3128 3129 /* srd & srd. 
*/ 3130 static void gen_srd(DisasContext *ctx) 3131 { 3132 TCGv t0, t1; 3133 3134 t0 = tcg_temp_new(); 3135 /* AND rS with a mask that is 0 when rB >= 0x40 */ 3136 tcg_gen_shli_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x39); 3137 tcg_gen_sari_tl(t0, t0, 0x3f); 3138 tcg_gen_andc_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 3139 t1 = tcg_temp_new(); 3140 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x3f); 3141 tcg_gen_shr_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 3142 tcg_temp_free(t1); 3143 tcg_temp_free(t0); 3144 if (unlikely(Rc(ctx->opcode) != 0)) { 3145 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 3146 } 3147 } 3148 #endif 3149 3150 /*** Addressing modes ***/ 3151 /* Register indirect with immediate index : EA = (rA|0) + SIMM */ 3152 static inline void gen_addr_imm_index(DisasContext *ctx, TCGv EA, 3153 target_long maskl) 3154 { 3155 target_long simm = SIMM(ctx->opcode); 3156 3157 simm &= ~maskl; 3158 if (rA(ctx->opcode) == 0) { 3159 if (NARROW_MODE(ctx)) { 3160 simm = (uint32_t)simm; 3161 } 3162 tcg_gen_movi_tl(EA, simm); 3163 } else if (likely(simm != 0)) { 3164 tcg_gen_addi_tl(EA, cpu_gpr[rA(ctx->opcode)], simm); 3165 if (NARROW_MODE(ctx)) { 3166 tcg_gen_ext32u_tl(EA, EA); 3167 } 3168 } else { 3169 if (NARROW_MODE(ctx)) { 3170 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3171 } else { 3172 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3173 } 3174 } 3175 } 3176 3177 static inline void gen_addr_reg_index(DisasContext *ctx, TCGv EA) 3178 { 3179 if (rA(ctx->opcode) == 0) { 3180 if (NARROW_MODE(ctx)) { 3181 tcg_gen_ext32u_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3182 } else { 3183 tcg_gen_mov_tl(EA, cpu_gpr[rB(ctx->opcode)]); 3184 } 3185 } else { 3186 tcg_gen_add_tl(EA, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 3187 if (NARROW_MODE(ctx)) { 3188 tcg_gen_ext32u_tl(EA, EA); 3189 } 3190 } 3191 } 3192 3193 static inline void gen_addr_register(DisasContext *ctx, TCGv EA) 3194 { 3195 if (rA(ctx->opcode) == 0) { 3196 tcg_gen_movi_tl(EA, 0); 3197 } else if (NARROW_MODE(ctx)) { 3198 tcg_gen_ext32u_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3199 } else { 3200 tcg_gen_mov_tl(EA, cpu_gpr[rA(ctx->opcode)]); 3201 } 3202 } 3203 3204 static inline void gen_addr_add(DisasContext *ctx, TCGv ret, TCGv arg1, 3205 target_long val) 3206 { 3207 tcg_gen_addi_tl(ret, arg1, val); 3208 if (NARROW_MODE(ctx)) { 3209 tcg_gen_ext32u_tl(ret, ret); 3210 } 3211 } 3212 3213 static inline void gen_align_no_le(DisasContext *ctx) 3214 { 3215 gen_exception_err(ctx, POWERPC_EXCP_ALIGN, 3216 (ctx->opcode & 0x03FF0000) | POWERPC_EXCP_ALIGN_LE); 3217 } 3218 3219 static TCGv do_ea_calc(DisasContext *ctx, int ra, TCGv displ) 3220 { 3221 TCGv ea = tcg_temp_new(); 3222 if (ra) { 3223 tcg_gen_add_tl(ea, cpu_gpr[ra], displ); 3224 } else { 3225 tcg_gen_mov_tl(ea, displ); 3226 } 3227 if (NARROW_MODE(ctx)) { 3228 tcg_gen_ext32u_tl(ea, ea); 3229 } 3230 return ea; 3231 } 3232 3233 /*** Integer load ***/ 3234 #define DEF_MEMOP(op) ((op) | ctx->default_tcg_memop_mask) 3235 #define BSWAP_MEMOP(op) ((op) | (ctx->default_tcg_memop_mask ^ MO_BSWAP)) 3236 3237 #define GEN_QEMU_LOAD_TL(ldop, op) \ 3238 static void glue(gen_qemu_, ldop)(DisasContext *ctx, \ 3239 TCGv val, \ 3240 TCGv addr) \ 3241 { \ 3242 tcg_gen_qemu_ld_tl(val, addr, ctx->mem_idx, op); \ 3243 } 3244 3245 GEN_QEMU_LOAD_TL(ld8u, DEF_MEMOP(MO_UB)) 3246 GEN_QEMU_LOAD_TL(ld16u, DEF_MEMOP(MO_UW)) 3247 GEN_QEMU_LOAD_TL(ld16s, DEF_MEMOP(MO_SW)) 3248 GEN_QEMU_LOAD_TL(ld32u, DEF_MEMOP(MO_UL)) 3249 GEN_QEMU_LOAD_TL(ld32s, DEF_MEMOP(MO_SL)) 3250 3251 GEN_QEMU_LOAD_TL(ld16ur, BSWAP_MEMOP(MO_UW)) 3252 
GEN_QEMU_LOAD_TL(ld32ur, BSWAP_MEMOP(MO_UL)) 3253 3254 #define GEN_QEMU_LOAD_64(ldop, op) \ 3255 static void glue(gen_qemu_, glue(ldop, _i64))(DisasContext *ctx, \ 3256 TCGv_i64 val, \ 3257 TCGv addr) \ 3258 { \ 3259 tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, op); \ 3260 } 3261 3262 GEN_QEMU_LOAD_64(ld8u, DEF_MEMOP(MO_UB)) 3263 GEN_QEMU_LOAD_64(ld16u, DEF_MEMOP(MO_UW)) 3264 GEN_QEMU_LOAD_64(ld32u, DEF_MEMOP(MO_UL)) 3265 GEN_QEMU_LOAD_64(ld32s, DEF_MEMOP(MO_SL)) 3266 GEN_QEMU_LOAD_64(ld64, DEF_MEMOP(MO_UQ)) 3267 3268 #if defined(TARGET_PPC64) 3269 GEN_QEMU_LOAD_64(ld64ur, BSWAP_MEMOP(MO_UQ)) 3270 #endif 3271 3272 #define GEN_QEMU_STORE_TL(stop, op) \ 3273 static void glue(gen_qemu_, stop)(DisasContext *ctx, \ 3274 TCGv val, \ 3275 TCGv addr) \ 3276 { \ 3277 tcg_gen_qemu_st_tl(val, addr, ctx->mem_idx, op); \ 3278 } 3279 3280 #if defined(TARGET_PPC64) || !defined(CONFIG_USER_ONLY) 3281 GEN_QEMU_STORE_TL(st8, DEF_MEMOP(MO_UB)) 3282 #endif 3283 GEN_QEMU_STORE_TL(st16, DEF_MEMOP(MO_UW)) 3284 GEN_QEMU_STORE_TL(st32, DEF_MEMOP(MO_UL)) 3285 3286 GEN_QEMU_STORE_TL(st16r, BSWAP_MEMOP(MO_UW)) 3287 GEN_QEMU_STORE_TL(st32r, BSWAP_MEMOP(MO_UL)) 3288 3289 #define GEN_QEMU_STORE_64(stop, op) \ 3290 static void glue(gen_qemu_, glue(stop, _i64))(DisasContext *ctx, \ 3291 TCGv_i64 val, \ 3292 TCGv addr) \ 3293 { \ 3294 tcg_gen_qemu_st_i64(val, addr, ctx->mem_idx, op); \ 3295 } 3296 3297 GEN_QEMU_STORE_64(st8, DEF_MEMOP(MO_UB)) 3298 GEN_QEMU_STORE_64(st16, DEF_MEMOP(MO_UW)) 3299 GEN_QEMU_STORE_64(st32, DEF_MEMOP(MO_UL)) 3300 GEN_QEMU_STORE_64(st64, DEF_MEMOP(MO_UQ)) 3301 3302 #if defined(TARGET_PPC64) 3303 GEN_QEMU_STORE_64(st64r, BSWAP_MEMOP(MO_UQ)) 3304 #endif 3305 3306 #define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk) \ 3307 static void glue(gen_, name##x)(DisasContext *ctx) \ 3308 { \ 3309 TCGv EA; \ 3310 chk; \ 3311 gen_set_access_type(ctx, ACCESS_INT); \ 3312 EA = tcg_temp_new(); \ 3313 gen_addr_reg_index(ctx, EA); \ 3314 gen_qemu_##ldop(ctx, cpu_gpr[rD(ctx->opcode)], EA); \ 3315 tcg_temp_free(EA); \ 3316 } 3317 3318 #define GEN_LDX(name, ldop, opc2, opc3, type) \ 3319 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_NONE) 3320 3321 #define GEN_LDX_HVRM(name, ldop, opc2, opc3, type) \ 3322 GEN_LDX_E(name, ldop, opc2, opc3, type, PPC_NONE, CHK_HVRM) 3323 3324 #define GEN_LDEPX(name, ldop, opc2, opc3) \ 3325 static void glue(gen_, name##epx)(DisasContext *ctx) \ 3326 { \ 3327 TCGv EA; \ 3328 CHK_SV; \ 3329 gen_set_access_type(ctx, ACCESS_INT); \ 3330 EA = tcg_temp_new(); \ 3331 gen_addr_reg_index(ctx, EA); \ 3332 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_LOAD, ldop);\ 3333 tcg_temp_free(EA); \ 3334 } 3335 3336 GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02) 3337 GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08) 3338 GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00) 3339 #if defined(TARGET_PPC64) 3340 GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00) 3341 #endif 3342 3343 #if defined(TARGET_PPC64) 3344 /* CI load/store variants */ 3345 GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST) 3346 GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x15, PPC_CILDST) 3347 GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST) 3348 GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST) 3349 #endif 3350 3351 /*** Integer store ***/ 3352 #define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk) \ 3353 static void glue(gen_, name##x)(DisasContext *ctx) \ 3354 { \ 3355 TCGv EA; \ 3356 chk; \ 3357 gen_set_access_type(ctx, ACCESS_INT); \ 3358 EA = tcg_temp_new(); \ 3359 gen_addr_reg_index(ctx, EA); \ 3360 gen_qemu_##stop(ctx, 
                   cpu_gpr[rS(ctx->opcode)], EA); \
    tcg_temp_free(EA); \
}
#define GEN_STX(name, stop, opc2, opc3, type) \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_NONE)

#define GEN_STX_HVRM(name, stop, opc2, opc3, type) \
    GEN_STX_E(name, stop, opc2, opc3, type, PPC_NONE, CHK_HVRM)

#define GEN_STEPX(name, stop, opc2, opc3) \
static void glue(gen_, name##epx)(DisasContext *ctx) \
{ \
    TCGv EA; \
    CHK_SV; \
    gen_set_access_type(ctx, ACCESS_INT); \
    EA = tcg_temp_new(); \
    gen_addr_reg_index(ctx, EA); \
    tcg_gen_qemu_st_tl( \
        cpu_gpr[rD(ctx->opcode)], EA, PPC_TLB_EPID_STORE, stop); \
    tcg_temp_free(EA); \
}

GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
#if defined(TARGET_PPC64)
GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1d, 0x04)
#endif

#if defined(TARGET_PPC64)
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
/*** Integer load and store with byte reverse ***/

/* lhbrx */
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER);

/* lwbrx */
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER);

#if defined(TARGET_PPC64)
/* ldbrx */
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE);
/* stdbrx */
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE);
#endif  /* TARGET_PPC64 */

/* sthbrx */
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER);
/* stwbrx */
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER);

/*** Integer load and store multiple ***/

/* lmw */
static void gen_lmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rD(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_lmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/* stmw */
static void gen_stmw(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    t1 = tcg_const_i32(rS(ctx->opcode));
    gen_addr_imm_index(ctx, t0, 0);
    gen_helper_stmw(cpu_env, t0, t1);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
}

/*** Integer load and store strings ***/

/* lswi */
/*
 * The PowerPC32 specification says we must generate an exception if rA is
 * in the range of registers to be loaded. On the other hand, IBM says
 * this is valid, but rA won't be loaded. For now, I'll follow the
 * spec...
 */
static void gen_lswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);
    int start = rD(ctx->opcode);
    int ra = rA(ctx->opcode);
    int nr;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    if (nb == 0) {
        nb = 32;
    }
    nr = DIV_ROUND_UP(nb, 4);
    if (unlikely(lsw_reg_in_range(start, nr, ra))) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_LSWX);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(start);
    gen_helper_lsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* lswx */
static void gen_lswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2, t3;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_const_i32(rD(ctx->opcode));
    t2 = tcg_const_i32(rA(ctx->opcode));
    t3 = tcg_const_i32(rB(ctx->opcode));
    gen_helper_lswx(cpu_env, t0, t1, t2, t3);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
    tcg_temp_free_i32(t3);
}

/* stswi */
static void gen_stswi(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;
    int nb = NB(ctx->opcode);

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_register(ctx, t0);
    if (nb == 0) {
        nb = 32;
    }
    t1 = tcg_const_i32(nb);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/* stswx */
static void gen_stswx(DisasContext *ctx)
{
    TCGv t0;
    TCGv_i32 t1, t2;

    if (ctx->le_mode) {
        gen_align_no_le(ctx);
        return;
    }
    gen_set_access_type(ctx, ACCESS_INT);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    t1 = tcg_temp_new_i32();
    tcg_gen_trunc_tl_i32(t1, cpu_xer);
    tcg_gen_andi_i32(t1, t1, 0x7F);
    t2 = tcg_const_i32(rS(ctx->opcode));
    gen_helper_stsw(cpu_env, t0, t1, t2);
    tcg_temp_free(t0);
    tcg_temp_free_i32(t1);
    tcg_temp_free_i32(t2);
}

/*** Memory synchronisation ***/
/* eieio */
static void gen_eieio(DisasContext *ctx)
{
    TCGBar bar = TCG_MO_LD_ST;

    /*
     * POWER9 has an eieio instruction variant using bit 6 as a hint to
     * tell the CPU it is a store-forwarding barrier.
     */
    if (ctx->opcode & 0x2000000) {
        /*
         * The ISA says that "Reserved fields in instructions are ignored
         * by the processor". So we ignore bit 6 on non-POWER9 CPUs, but
         * since this is not an instruction software should be using,
         * complain to the user.
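         * (Bit 6 is counted from the most significant bit of the
         * instruction word, hence the 0x2000000 mask tested above.)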
3583 */ 3584 if (!(ctx->insns_flags2 & PPC2_ISA300)) { 3585 qemu_log_mask(LOG_GUEST_ERROR, "invalid eieio using bit 6 at @" 3586 TARGET_FMT_lx "\n", ctx->cia); 3587 } else { 3588 bar = TCG_MO_ST_LD; 3589 } 3590 } 3591 3592 tcg_gen_mb(bar | TCG_BAR_SC); 3593 } 3594 3595 #if !defined(CONFIG_USER_ONLY) 3596 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) 3597 { 3598 TCGv_i32 t; 3599 TCGLabel *l; 3600 3601 if (!ctx->lazy_tlb_flush) { 3602 return; 3603 } 3604 l = gen_new_label(); 3605 t = tcg_temp_new_i32(); 3606 tcg_gen_ld_i32(t, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 3607 tcg_gen_brcondi_i32(TCG_COND_EQ, t, 0, l); 3608 if (global) { 3609 gen_helper_check_tlb_flush_global(cpu_env); 3610 } else { 3611 gen_helper_check_tlb_flush_local(cpu_env); 3612 } 3613 gen_set_label(l); 3614 tcg_temp_free_i32(t); 3615 } 3616 #else 3617 static inline void gen_check_tlb_flush(DisasContext *ctx, bool global) { } 3618 #endif 3619 3620 /* isync */ 3621 static void gen_isync(DisasContext *ctx) 3622 { 3623 /* 3624 * We need to check for a pending TLB flush. This can only happen in 3625 * kernel mode however so check MSR_PR 3626 */ 3627 if (!ctx->pr) { 3628 gen_check_tlb_flush(ctx, false); 3629 } 3630 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 3631 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 3632 } 3633 3634 #define MEMOP_GET_SIZE(x) (1 << ((x) & MO_SIZE)) 3635 3636 static void gen_load_locked(DisasContext *ctx, MemOp memop) 3637 { 3638 TCGv gpr = cpu_gpr[rD(ctx->opcode)]; 3639 TCGv t0 = tcg_temp_new(); 3640 3641 gen_set_access_type(ctx, ACCESS_RES); 3642 gen_addr_reg_index(ctx, t0); 3643 tcg_gen_qemu_ld_tl(gpr, t0, ctx->mem_idx, memop | MO_ALIGN); 3644 tcg_gen_mov_tl(cpu_reserve, t0); 3645 tcg_gen_mov_tl(cpu_reserve_val, gpr); 3646 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_LDAQ); 3647 tcg_temp_free(t0); 3648 } 3649 3650 #define LARX(name, memop) \ 3651 static void gen_##name(DisasContext *ctx) \ 3652 { \ 3653 gen_load_locked(ctx, memop); \ 3654 } 3655 3656 /* lwarx */ 3657 LARX(lbarx, DEF_MEMOP(MO_UB)) 3658 LARX(lharx, DEF_MEMOP(MO_UW)) 3659 LARX(lwarx, DEF_MEMOP(MO_UL)) 3660 3661 static void gen_fetch_inc_conditional(DisasContext *ctx, MemOp memop, 3662 TCGv EA, TCGCond cond, int addend) 3663 { 3664 TCGv t = tcg_temp_new(); 3665 TCGv t2 = tcg_temp_new(); 3666 TCGv u = tcg_temp_new(); 3667 3668 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3669 tcg_gen_addi_tl(t2, EA, MEMOP_GET_SIZE(memop)); 3670 tcg_gen_qemu_ld_tl(t2, t2, ctx->mem_idx, memop); 3671 tcg_gen_addi_tl(u, t, addend); 3672 3673 /* E.g. for fetch and increment bounded... */ 3674 /* mem(EA,s) = (t != t2 ? u = t + 1 : t) */ 3675 tcg_gen_movcond_tl(cond, u, t, t2, u, t); 3676 tcg_gen_qemu_st_tl(u, EA, ctx->mem_idx, memop); 3677 3678 /* RT = (t != t2 ? 
t : u = 1<<(s*8-1)) */ 3679 tcg_gen_movi_tl(u, 1 << (MEMOP_GET_SIZE(memop) * 8 - 1)); 3680 tcg_gen_movcond_tl(cond, cpu_gpr[rD(ctx->opcode)], t, t2, t, u); 3681 3682 tcg_temp_free(t); 3683 tcg_temp_free(t2); 3684 tcg_temp_free(u); 3685 } 3686 3687 static void gen_ld_atomic(DisasContext *ctx, MemOp memop) 3688 { 3689 uint32_t gpr_FC = FC(ctx->opcode); 3690 TCGv EA = tcg_temp_new(); 3691 int rt = rD(ctx->opcode); 3692 bool need_serial; 3693 TCGv src, dst; 3694 3695 gen_addr_register(ctx, EA); 3696 dst = cpu_gpr[rt]; 3697 src = cpu_gpr[(rt + 1) & 31]; 3698 3699 need_serial = false; 3700 memop |= MO_ALIGN; 3701 switch (gpr_FC) { 3702 case 0: /* Fetch and add */ 3703 tcg_gen_atomic_fetch_add_tl(dst, EA, src, ctx->mem_idx, memop); 3704 break; 3705 case 1: /* Fetch and xor */ 3706 tcg_gen_atomic_fetch_xor_tl(dst, EA, src, ctx->mem_idx, memop); 3707 break; 3708 case 2: /* Fetch and or */ 3709 tcg_gen_atomic_fetch_or_tl(dst, EA, src, ctx->mem_idx, memop); 3710 break; 3711 case 3: /* Fetch and 'and' */ 3712 tcg_gen_atomic_fetch_and_tl(dst, EA, src, ctx->mem_idx, memop); 3713 break; 3714 case 4: /* Fetch and max unsigned */ 3715 tcg_gen_atomic_fetch_umax_tl(dst, EA, src, ctx->mem_idx, memop); 3716 break; 3717 case 5: /* Fetch and max signed */ 3718 tcg_gen_atomic_fetch_smax_tl(dst, EA, src, ctx->mem_idx, memop); 3719 break; 3720 case 6: /* Fetch and min unsigned */ 3721 tcg_gen_atomic_fetch_umin_tl(dst, EA, src, ctx->mem_idx, memop); 3722 break; 3723 case 7: /* Fetch and min signed */ 3724 tcg_gen_atomic_fetch_smin_tl(dst, EA, src, ctx->mem_idx, memop); 3725 break; 3726 case 8: /* Swap */ 3727 tcg_gen_atomic_xchg_tl(dst, EA, src, ctx->mem_idx, memop); 3728 break; 3729 3730 case 16: /* Compare and swap not equal */ 3731 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3732 need_serial = true; 3733 } else { 3734 TCGv t0 = tcg_temp_new(); 3735 TCGv t1 = tcg_temp_new(); 3736 3737 tcg_gen_qemu_ld_tl(t0, EA, ctx->mem_idx, memop); 3738 if ((memop & MO_SIZE) == MO_64 || TARGET_LONG_BITS == 32) { 3739 tcg_gen_mov_tl(t1, src); 3740 } else { 3741 tcg_gen_ext32u_tl(t1, src); 3742 } 3743 tcg_gen_movcond_tl(TCG_COND_NE, t1, t0, t1, 3744 cpu_gpr[(rt + 2) & 31], t0); 3745 tcg_gen_qemu_st_tl(t1, EA, ctx->mem_idx, memop); 3746 tcg_gen_mov_tl(dst, t0); 3747 3748 tcg_temp_free(t0); 3749 tcg_temp_free(t1); 3750 } 3751 break; 3752 3753 case 24: /* Fetch and increment bounded */ 3754 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3755 need_serial = true; 3756 } else { 3757 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, 1); 3758 } 3759 break; 3760 case 25: /* Fetch and increment equal */ 3761 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3762 need_serial = true; 3763 } else { 3764 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_EQ, 1); 3765 } 3766 break; 3767 case 28: /* Fetch and decrement bounded */ 3768 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3769 need_serial = true; 3770 } else { 3771 gen_fetch_inc_conditional(ctx, memop, EA, TCG_COND_NE, -1); 3772 } 3773 break; 3774 3775 default: 3776 /* invoke data storage error handler */ 3777 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3778 } 3779 tcg_temp_free(EA); 3780 3781 if (need_serial) { 3782 /* Restart with exclusive lock. 
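         * gen_helper_exit_atomic raises EXCP_ATOMIC, which makes the main
         * loop re-run this one instruction with all other vCPUs stopped,
         * so the non-atomic fallback paths above are safe in that context.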
*/ 3783 gen_helper_exit_atomic(cpu_env); 3784 ctx->base.is_jmp = DISAS_NORETURN; 3785 } 3786 } 3787 3788 static void gen_lwat(DisasContext *ctx) 3789 { 3790 gen_ld_atomic(ctx, DEF_MEMOP(MO_UL)); 3791 } 3792 3793 #ifdef TARGET_PPC64 3794 static void gen_ldat(DisasContext *ctx) 3795 { 3796 gen_ld_atomic(ctx, DEF_MEMOP(MO_UQ)); 3797 } 3798 #endif 3799 3800 static void gen_st_atomic(DisasContext *ctx, MemOp memop) 3801 { 3802 uint32_t gpr_FC = FC(ctx->opcode); 3803 TCGv EA = tcg_temp_new(); 3804 TCGv src, discard; 3805 3806 gen_addr_register(ctx, EA); 3807 src = cpu_gpr[rD(ctx->opcode)]; 3808 discard = tcg_temp_new(); 3809 3810 memop |= MO_ALIGN; 3811 switch (gpr_FC) { 3812 case 0: /* add and Store */ 3813 tcg_gen_atomic_add_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3814 break; 3815 case 1: /* xor and Store */ 3816 tcg_gen_atomic_xor_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3817 break; 3818 case 2: /* Or and Store */ 3819 tcg_gen_atomic_or_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3820 break; 3821 case 3: /* 'and' and Store */ 3822 tcg_gen_atomic_and_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3823 break; 3824 case 4: /* Store max unsigned */ 3825 tcg_gen_atomic_umax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3826 break; 3827 case 5: /* Store max signed */ 3828 tcg_gen_atomic_smax_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3829 break; 3830 case 6: /* Store min unsigned */ 3831 tcg_gen_atomic_umin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3832 break; 3833 case 7: /* Store min signed */ 3834 tcg_gen_atomic_smin_fetch_tl(discard, EA, src, ctx->mem_idx, memop); 3835 break; 3836 case 24: /* Store twin */ 3837 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3838 /* Restart with exclusive lock. */ 3839 gen_helper_exit_atomic(cpu_env); 3840 ctx->base.is_jmp = DISAS_NORETURN; 3841 } else { 3842 TCGv t = tcg_temp_new(); 3843 TCGv t2 = tcg_temp_new(); 3844 TCGv s = tcg_temp_new(); 3845 TCGv s2 = tcg_temp_new(); 3846 TCGv ea_plus_s = tcg_temp_new(); 3847 3848 tcg_gen_qemu_ld_tl(t, EA, ctx->mem_idx, memop); 3849 tcg_gen_addi_tl(ea_plus_s, EA, MEMOP_GET_SIZE(memop)); 3850 tcg_gen_qemu_ld_tl(t2, ea_plus_s, ctx->mem_idx, memop); 3851 tcg_gen_movcond_tl(TCG_COND_EQ, s, t, t2, src, t); 3852 tcg_gen_movcond_tl(TCG_COND_EQ, s2, t, t2, src, t2); 3853 tcg_gen_qemu_st_tl(s, EA, ctx->mem_idx, memop); 3854 tcg_gen_qemu_st_tl(s2, ea_plus_s, ctx->mem_idx, memop); 3855 3856 tcg_temp_free(ea_plus_s); 3857 tcg_temp_free(s2); 3858 tcg_temp_free(s); 3859 tcg_temp_free(t2); 3860 tcg_temp_free(t); 3861 } 3862 break; 3863 default: 3864 /* invoke data storage error handler */ 3865 gen_exception_err(ctx, POWERPC_EXCP_DSI, POWERPC_EXCP_INVAL); 3866 } 3867 tcg_temp_free(discard); 3868 tcg_temp_free(EA); 3869 } 3870 3871 static void gen_stwat(DisasContext *ctx) 3872 { 3873 gen_st_atomic(ctx, DEF_MEMOP(MO_UL)); 3874 } 3875 3876 #ifdef TARGET_PPC64 3877 static void gen_stdat(DisasContext *ctx) 3878 { 3879 gen_st_atomic(ctx, DEF_MEMOP(MO_UQ)); 3880 } 3881 #endif 3882 3883 static void gen_conditional_store(DisasContext *ctx, MemOp memop) 3884 { 3885 TCGLabel *l1 = gen_new_label(); 3886 TCGLabel *l2 = gen_new_label(); 3887 TCGv t0 = tcg_temp_new(); 3888 int reg = rS(ctx->opcode); 3889 3890 gen_set_access_type(ctx, ACCESS_RES); 3891 gen_addr_reg_index(ctx, t0); 3892 tcg_gen_brcond_tl(TCG_COND_NE, t0, cpu_reserve, l1); 3893 tcg_temp_free(t0); 3894 3895 t0 = tcg_temp_new(); 3896 tcg_gen_atomic_cmpxchg_tl(t0, cpu_reserve, cpu_reserve_val, 3897 cpu_gpr[reg], ctx->mem_idx, 3898 DEF_MEMOP(memop) | 
MO_ALIGN); 3899 tcg_gen_setcond_tl(TCG_COND_EQ, t0, t0, cpu_reserve_val); 3900 tcg_gen_shli_tl(t0, t0, CRF_EQ_BIT); 3901 tcg_gen_or_tl(t0, t0, cpu_so); 3902 tcg_gen_trunc_tl_i32(cpu_crf[0], t0); 3903 tcg_temp_free(t0); 3904 tcg_gen_br(l2); 3905 3906 gen_set_label(l1); 3907 3908 /* 3909 * Address mismatch implies failure. But we still need to provide 3910 * the memory barrier semantics of the instruction. 3911 */ 3912 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_STRL); 3913 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 3914 3915 gen_set_label(l2); 3916 tcg_gen_movi_tl(cpu_reserve, -1); 3917 } 3918 3919 #define STCX(name, memop) \ 3920 static void gen_##name(DisasContext *ctx) \ 3921 { \ 3922 gen_conditional_store(ctx, memop); \ 3923 } 3924 3925 STCX(stbcx_, DEF_MEMOP(MO_UB)) 3926 STCX(sthcx_, DEF_MEMOP(MO_UW)) 3927 STCX(stwcx_, DEF_MEMOP(MO_UL)) 3928 3929 #if defined(TARGET_PPC64) 3930 /* ldarx */ 3931 LARX(ldarx, DEF_MEMOP(MO_UQ)) 3932 /* stdcx. */ 3933 STCX(stdcx_, DEF_MEMOP(MO_UQ)) 3934 3935 /* lqarx */ 3936 static void gen_lqarx(DisasContext *ctx) 3937 { 3938 int rd = rD(ctx->opcode); 3939 TCGv EA, hi, lo; 3940 3941 if (unlikely((rd & 1) || (rd == rA(ctx->opcode)) || 3942 (rd == rB(ctx->opcode)))) { 3943 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 3944 return; 3945 } 3946 3947 gen_set_access_type(ctx, ACCESS_RES); 3948 EA = tcg_temp_new(); 3949 gen_addr_reg_index(ctx, EA); 3950 3951 /* Note that the low part is always in RD+1, even in LE mode. */ 3952 lo = cpu_gpr[rd + 1]; 3953 hi = cpu_gpr[rd]; 3954 3955 if (tb_cflags(ctx->base.tb) & CF_PARALLEL) { 3956 if (HAVE_ATOMIC128) { 3957 TCGv_i32 oi = tcg_temp_new_i32(); 3958 if (ctx->le_mode) { 3959 tcg_gen_movi_i32(oi, make_memop_idx(MO_LE | MO_128 | MO_ALIGN, 3960 ctx->mem_idx)); 3961 gen_helper_lq_le_parallel(lo, cpu_env, EA, oi); 3962 } else { 3963 tcg_gen_movi_i32(oi, make_memop_idx(MO_BE | MO_128 | MO_ALIGN, 3964 ctx->mem_idx)); 3965 gen_helper_lq_be_parallel(lo, cpu_env, EA, oi); 3966 } 3967 tcg_temp_free_i32(oi); 3968 tcg_gen_ld_i64(hi, cpu_env, offsetof(CPUPPCState, retxh)); 3969 } else { 3970 /* Restart with exclusive lock. */ 3971 gen_helper_exit_atomic(cpu_env); 3972 ctx->base.is_jmp = DISAS_NORETURN; 3973 tcg_temp_free(EA); 3974 return; 3975 } 3976 } else if (ctx->le_mode) { 3977 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_LEUQ | MO_ALIGN_16); 3978 tcg_gen_mov_tl(cpu_reserve, EA); 3979 gen_addr_add(ctx, EA, EA, 8); 3980 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_LEUQ); 3981 } else { 3982 tcg_gen_qemu_ld_i64(hi, EA, ctx->mem_idx, MO_BEUQ | MO_ALIGN_16); 3983 tcg_gen_mov_tl(cpu_reserve, EA); 3984 gen_addr_add(ctx, EA, EA, 8); 3985 tcg_gen_qemu_ld_i64(lo, EA, ctx->mem_idx, MO_BEUQ); 3986 } 3987 tcg_temp_free(EA); 3988 3989 tcg_gen_st_tl(hi, cpu_env, offsetof(CPUPPCState, reserve_val)); 3990 tcg_gen_st_tl(lo, cpu_env, offsetof(CPUPPCState, reserve_val2)); 3991 } 3992 3993 /* stqcx. */ 3994 static void gen_stqcx_(DisasContext *ctx) 3995 { 3996 int rs = rS(ctx->opcode); 3997 TCGv EA, hi, lo; 3998 3999 if (unlikely(rs & 1)) { 4000 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4001 return; 4002 } 4003 4004 gen_set_access_type(ctx, ACCESS_RES); 4005 EA = tcg_temp_new(); 4006 gen_addr_reg_index(ctx, EA); 4007 4008 /* Note that the low part is always in RS+1, even in LE mode. 
     */
    lo = cpu_gpr[rs + 1];
    hi = cpu_gpr[rs];

    if (tb_cflags(ctx->base.tb) & CF_PARALLEL) {
        if (HAVE_CMPXCHG128) {
            TCGv_i32 oi = tcg_const_i32(DEF_MEMOP(MO_128) | MO_ALIGN);
            if (ctx->le_mode) {
                gen_helper_stqcx_le_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            } else {
                gen_helper_stqcx_be_parallel(cpu_crf[0], cpu_env,
                                             EA, lo, hi, oi);
            }
            tcg_temp_free_i32(oi);
        } else {
            /* Restart with exclusive lock. */
            gen_helper_exit_atomic(cpu_env);
            ctx->base.is_jmp = DISAS_NORETURN;
        }
        tcg_temp_free(EA);
    } else {
        TCGLabel *lab_fail = gen_new_label();
        TCGLabel *lab_over = gen_new_label();
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i64 t1 = tcg_temp_new_i64();

        tcg_gen_brcond_tl(TCG_COND_NE, EA, cpu_reserve, lab_fail);
        tcg_temp_free(EA);

        gen_qemu_ld64_i64(ctx, t0, cpu_reserve);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val2)
                                     : offsetof(CPUPPCState, reserve_val)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_ld64_i64(ctx, t0, t0);
        tcg_gen_ld_i64(t1, cpu_env, (ctx->le_mode
                                     ? offsetof(CPUPPCState, reserve_val)
                                     : offsetof(CPUPPCState, reserve_val2)));
        tcg_gen_brcond_i64(TCG_COND_NE, t0, t1, lab_fail);

        /* Success */
        gen_qemu_st64_i64(ctx, ctx->le_mode ? lo : hi, cpu_reserve);
        tcg_gen_addi_i64(t0, cpu_reserve, 8);
        gen_qemu_st64_i64(ctx, ctx->le_mode ? hi : lo, t0);

        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);
        tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ);
        tcg_gen_br(lab_over);

        gen_set_label(lab_fail);
        tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so);

        gen_set_label(lab_over);
        tcg_gen_movi_tl(cpu_reserve, -1);
        tcg_temp_free_i64(t0);
        tcg_temp_free_i64(t1);
    }
}
#endif /* defined(TARGET_PPC64) */

/* sync */
static void gen_sync(DisasContext *ctx)
{
    uint32_t l = (ctx->opcode >> 21) & 3;

    /*
     * We may need to check for a pending TLB flush.
     *
     * We do this on ptesync (l == 2) on ppc64 and any sync on ppc32.
     *
     * Additionally, this can only happen in kernel mode, however, so
     * check MSR_PR as well.
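     *
     * (gen_check_tlb_flush() is expected to test env->tlb_need_flush at
     * run time and only call the flush helper when a flush is actually
     * pending, so doing this on every eligible sync should be cheap in
     * the common case.)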
4083 */ 4084 if (((l == 2) || !(ctx->insns_flags & PPC_64B)) && !ctx->pr) { 4085 gen_check_tlb_flush(ctx, true); 4086 } 4087 tcg_gen_mb(TCG_MO_ALL | TCG_BAR_SC); 4088 } 4089 4090 /* wait */ 4091 static void gen_wait(DisasContext *ctx) 4092 { 4093 TCGv_i32 t0 = tcg_const_i32(1); 4094 tcg_gen_st_i32(t0, cpu_env, 4095 -offsetof(PowerPCCPU, env) + offsetof(CPUState, halted)); 4096 tcg_temp_free_i32(t0); 4097 /* Stop translation, as the CPU is supposed to sleep from now */ 4098 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4099 } 4100 4101 #if defined(TARGET_PPC64) 4102 static void gen_doze(DisasContext *ctx) 4103 { 4104 #if defined(CONFIG_USER_ONLY) 4105 GEN_PRIV; 4106 #else 4107 TCGv_i32 t; 4108 4109 CHK_HV; 4110 t = tcg_const_i32(PPC_PM_DOZE); 4111 gen_helper_pminsn(cpu_env, t); 4112 tcg_temp_free_i32(t); 4113 /* Stop translation, as the CPU is supposed to sleep from now */ 4114 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4115 #endif /* defined(CONFIG_USER_ONLY) */ 4116 } 4117 4118 static void gen_nap(DisasContext *ctx) 4119 { 4120 #if defined(CONFIG_USER_ONLY) 4121 GEN_PRIV; 4122 #else 4123 TCGv_i32 t; 4124 4125 CHK_HV; 4126 t = tcg_const_i32(PPC_PM_NAP); 4127 gen_helper_pminsn(cpu_env, t); 4128 tcg_temp_free_i32(t); 4129 /* Stop translation, as the CPU is supposed to sleep from now */ 4130 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4131 #endif /* defined(CONFIG_USER_ONLY) */ 4132 } 4133 4134 static void gen_stop(DisasContext *ctx) 4135 { 4136 #if defined(CONFIG_USER_ONLY) 4137 GEN_PRIV; 4138 #else 4139 TCGv_i32 t; 4140 4141 CHK_HV; 4142 t = tcg_const_i32(PPC_PM_STOP); 4143 gen_helper_pminsn(cpu_env, t); 4144 tcg_temp_free_i32(t); 4145 /* Stop translation, as the CPU is supposed to sleep from now */ 4146 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4147 #endif /* defined(CONFIG_USER_ONLY) */ 4148 } 4149 4150 static void gen_sleep(DisasContext *ctx) 4151 { 4152 #if defined(CONFIG_USER_ONLY) 4153 GEN_PRIV; 4154 #else 4155 TCGv_i32 t; 4156 4157 CHK_HV; 4158 t = tcg_const_i32(PPC_PM_SLEEP); 4159 gen_helper_pminsn(cpu_env, t); 4160 tcg_temp_free_i32(t); 4161 /* Stop translation, as the CPU is supposed to sleep from now */ 4162 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4163 #endif /* defined(CONFIG_USER_ONLY) */ 4164 } 4165 4166 static void gen_rvwinkle(DisasContext *ctx) 4167 { 4168 #if defined(CONFIG_USER_ONLY) 4169 GEN_PRIV; 4170 #else 4171 TCGv_i32 t; 4172 4173 CHK_HV; 4174 t = tcg_const_i32(PPC_PM_RVWINKLE); 4175 gen_helper_pminsn(cpu_env, t); 4176 tcg_temp_free_i32(t); 4177 /* Stop translation, as the CPU is supposed to sleep from now */ 4178 gen_exception_nip(ctx, EXCP_HLT, ctx->base.pc_next); 4179 #endif /* defined(CONFIG_USER_ONLY) */ 4180 } 4181 #endif /* #if defined(TARGET_PPC64) */ 4182 4183 static inline void gen_update_cfar(DisasContext *ctx, target_ulong nip) 4184 { 4185 #if defined(TARGET_PPC64) 4186 if (ctx->has_cfar) { 4187 tcg_gen_movi_tl(cpu_cfar, nip); 4188 } 4189 #endif 4190 } 4191 4192 #if defined(TARGET_PPC64) 4193 static void pmu_count_insns(DisasContext *ctx) 4194 { 4195 /* 4196 * Do not bother calling the helper if the PMU isn't counting 4197 * instructions. 4198 */ 4199 if (!ctx->pmu_insn_cnt) { 4200 return; 4201 } 4202 4203 #if !defined(CONFIG_USER_ONLY) 4204 /* 4205 * The PMU insns_inc() helper stops the internal PMU timer if a 4206 * counter overflows happens. In that case, if the guest is 4207 * running with icount and we do not handle it beforehand, 4208 * the helper can trigger a 'bad icount read'. 
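     *
     * That is why gen_icount_io_start() is called below before the
     * gen_helper_insns_inc() call.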
4209 */ 4210 gen_icount_io_start(ctx); 4211 4212 gen_helper_insns_inc(cpu_env, tcg_constant_i32(ctx->base.num_insns)); 4213 #else 4214 /* 4215 * User mode can read (but not write) PMC5 and start/stop 4216 * the PMU via MMCR0_FC. In this case just increment 4217 * PMC5 with base.num_insns. 4218 */ 4219 TCGv t0 = tcg_temp_new(); 4220 4221 gen_load_spr(t0, SPR_POWER_PMC5); 4222 tcg_gen_addi_tl(t0, t0, ctx->base.num_insns); 4223 gen_store_spr(SPR_POWER_PMC5, t0); 4224 4225 tcg_temp_free(t0); 4226 #endif /* #if !defined(CONFIG_USER_ONLY) */ 4227 } 4228 #else 4229 static void pmu_count_insns(DisasContext *ctx) 4230 { 4231 return; 4232 } 4233 #endif /* #if defined(TARGET_PPC64) */ 4234 4235 static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest) 4236 { 4237 return translator_use_goto_tb(&ctx->base, dest); 4238 } 4239 4240 static void gen_lookup_and_goto_ptr(DisasContext *ctx) 4241 { 4242 if (unlikely(ctx->singlestep_enabled)) { 4243 gen_debug_exception(ctx); 4244 } else { 4245 /* 4246 * tcg_gen_lookup_and_goto_ptr will exit the TB if 4247 * CF_NO_GOTO_PTR is set. Count insns now. 4248 */ 4249 if (ctx->base.tb->flags & CF_NO_GOTO_PTR) { 4250 pmu_count_insns(ctx); 4251 } 4252 4253 tcg_gen_lookup_and_goto_ptr(); 4254 } 4255 } 4256 4257 /*** Branch ***/ 4258 static void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest) 4259 { 4260 if (NARROW_MODE(ctx)) { 4261 dest = (uint32_t) dest; 4262 } 4263 if (use_goto_tb(ctx, dest)) { 4264 pmu_count_insns(ctx); 4265 tcg_gen_goto_tb(n); 4266 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4267 tcg_gen_exit_tb(ctx->base.tb, n); 4268 } else { 4269 tcg_gen_movi_tl(cpu_nip, dest & ~3); 4270 gen_lookup_and_goto_ptr(ctx); 4271 } 4272 } 4273 4274 static inline void gen_setlr(DisasContext *ctx, target_ulong nip) 4275 { 4276 if (NARROW_MODE(ctx)) { 4277 nip = (uint32_t)nip; 4278 } 4279 tcg_gen_movi_tl(cpu_lr, nip); 4280 } 4281 4282 /* b ba bl bla */ 4283 static void gen_b(DisasContext *ctx) 4284 { 4285 target_ulong li, target; 4286 4287 /* sign extend LI */ 4288 li = LI(ctx->opcode); 4289 li = (li ^ 0x02000000) - 0x02000000; 4290 if (likely(AA(ctx->opcode) == 0)) { 4291 target = ctx->cia + li; 4292 } else { 4293 target = li; 4294 } 4295 if (LK(ctx->opcode)) { 4296 gen_setlr(ctx, ctx->base.pc_next); 4297 } 4298 gen_update_cfar(ctx, ctx->cia); 4299 gen_goto_tb(ctx, 0, target); 4300 ctx->base.is_jmp = DISAS_NORETURN; 4301 } 4302 4303 #define BCOND_IM 0 4304 #define BCOND_LR 1 4305 #define BCOND_CTR 2 4306 #define BCOND_TAR 3 4307 4308 static void gen_bcond(DisasContext *ctx, int type) 4309 { 4310 uint32_t bo = BO(ctx->opcode); 4311 TCGLabel *l1; 4312 TCGv target; 4313 4314 if (type == BCOND_LR || type == BCOND_CTR || type == BCOND_TAR) { 4315 target = tcg_temp_local_new(); 4316 if (type == BCOND_CTR) { 4317 tcg_gen_mov_tl(target, cpu_ctr); 4318 } else if (type == BCOND_TAR) { 4319 gen_load_spr(target, SPR_TAR); 4320 } else { 4321 tcg_gen_mov_tl(target, cpu_lr); 4322 } 4323 } else { 4324 target = NULL; 4325 } 4326 if (LK(ctx->opcode)) { 4327 gen_setlr(ctx, ctx->base.pc_next); 4328 } 4329 l1 = gen_new_label(); 4330 if ((bo & 0x4) == 0) { 4331 /* Decrement and test CTR */ 4332 TCGv temp = tcg_temp_new(); 4333 4334 if (type == BCOND_CTR) { 4335 /* 4336 * All ISAs up to v3 describe this form of bcctr as invalid but 4337 * some processors, ie. 64-bit server processors compliant with 4338 * arch 2.x, do implement a "test and decrement" logic instead, 4339 * as described in their respective UMs. 
This logic involves CTR 4340 * to act as both the branch target and a counter, which makes 4341 * it basically useless and thus never used in real code. 4342 * 4343 * This form was hence chosen to trigger extra micro-architectural 4344 * side-effect on real HW needed for the Spectre v2 workaround. 4345 * It is up to guests that implement such workaround, ie. linux, to 4346 * use this form in a way it just triggers the side-effect without 4347 * doing anything else harmful. 4348 */ 4349 if (unlikely(!is_book3s_arch2x(ctx))) { 4350 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4351 tcg_temp_free(temp); 4352 tcg_temp_free(target); 4353 return; 4354 } 4355 4356 if (NARROW_MODE(ctx)) { 4357 tcg_gen_ext32u_tl(temp, cpu_ctr); 4358 } else { 4359 tcg_gen_mov_tl(temp, cpu_ctr); 4360 } 4361 if (bo & 0x2) { 4362 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4363 } else { 4364 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4365 } 4366 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4367 } else { 4368 tcg_gen_subi_tl(cpu_ctr, cpu_ctr, 1); 4369 if (NARROW_MODE(ctx)) { 4370 tcg_gen_ext32u_tl(temp, cpu_ctr); 4371 } else { 4372 tcg_gen_mov_tl(temp, cpu_ctr); 4373 } 4374 if (bo & 0x2) { 4375 tcg_gen_brcondi_tl(TCG_COND_NE, temp, 0, l1); 4376 } else { 4377 tcg_gen_brcondi_tl(TCG_COND_EQ, temp, 0, l1); 4378 } 4379 } 4380 tcg_temp_free(temp); 4381 } 4382 if ((bo & 0x10) == 0) { 4383 /* Test CR */ 4384 uint32_t bi = BI(ctx->opcode); 4385 uint32_t mask = 0x08 >> (bi & 0x03); 4386 TCGv_i32 temp = tcg_temp_new_i32(); 4387 4388 if (bo & 0x8) { 4389 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4390 tcg_gen_brcondi_i32(TCG_COND_EQ, temp, 0, l1); 4391 } else { 4392 tcg_gen_andi_i32(temp, cpu_crf[bi >> 2], mask); 4393 tcg_gen_brcondi_i32(TCG_COND_NE, temp, 0, l1); 4394 } 4395 tcg_temp_free_i32(temp); 4396 } 4397 gen_update_cfar(ctx, ctx->cia); 4398 if (type == BCOND_IM) { 4399 target_ulong li = (target_long)((int16_t)(BD(ctx->opcode))); 4400 if (likely(AA(ctx->opcode) == 0)) { 4401 gen_goto_tb(ctx, 0, ctx->cia + li); 4402 } else { 4403 gen_goto_tb(ctx, 0, li); 4404 } 4405 } else { 4406 if (NARROW_MODE(ctx)) { 4407 tcg_gen_andi_tl(cpu_nip, target, (uint32_t)~3); 4408 } else { 4409 tcg_gen_andi_tl(cpu_nip, target, ~3); 4410 } 4411 gen_lookup_and_goto_ptr(ctx); 4412 tcg_temp_free(target); 4413 } 4414 if ((bo & 0x14) != 0x14) { 4415 /* fallthrough case */ 4416 gen_set_label(l1); 4417 gen_goto_tb(ctx, 1, ctx->base.pc_next); 4418 } 4419 ctx->base.is_jmp = DISAS_NORETURN; 4420 } 4421 4422 static void gen_bc(DisasContext *ctx) 4423 { 4424 gen_bcond(ctx, BCOND_IM); 4425 } 4426 4427 static void gen_bcctr(DisasContext *ctx) 4428 { 4429 gen_bcond(ctx, BCOND_CTR); 4430 } 4431 4432 static void gen_bclr(DisasContext *ctx) 4433 { 4434 gen_bcond(ctx, BCOND_LR); 4435 } 4436 4437 static void gen_bctar(DisasContext *ctx) 4438 { 4439 gen_bcond(ctx, BCOND_TAR); 4440 } 4441 4442 /*** Condition register logical ***/ 4443 #define GEN_CRLOGIC(name, tcg_op, opc) \ 4444 static void glue(gen_, name)(DisasContext *ctx) \ 4445 { \ 4446 uint8_t bitmask; \ 4447 int sh; \ 4448 TCGv_i32 t0, t1; \ 4449 sh = (crbD(ctx->opcode) & 0x03) - (crbA(ctx->opcode) & 0x03); \ 4450 t0 = tcg_temp_new_i32(); \ 4451 if (sh > 0) \ 4452 tcg_gen_shri_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], sh); \ 4453 else if (sh < 0) \ 4454 tcg_gen_shli_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2], -sh); \ 4455 else \ 4456 tcg_gen_mov_i32(t0, cpu_crf[crbA(ctx->opcode) >> 2]); \ 4457 t1 = tcg_temp_new_i32(); \ 4458 sh = (crbD(ctx->opcode) & 0x03) - (crbB(ctx->opcode) & 0x03); \ 4459 if (sh > 0) 
\ 4460 tcg_gen_shri_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], sh); \ 4461 else if (sh < 0) \ 4462 tcg_gen_shli_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2], -sh); \ 4463 else \ 4464 tcg_gen_mov_i32(t1, cpu_crf[crbB(ctx->opcode) >> 2]); \ 4465 tcg_op(t0, t0, t1); \ 4466 bitmask = 0x08 >> (crbD(ctx->opcode) & 0x03); \ 4467 tcg_gen_andi_i32(t0, t0, bitmask); \ 4468 tcg_gen_andi_i32(t1, cpu_crf[crbD(ctx->opcode) >> 2], ~bitmask); \ 4469 tcg_gen_or_i32(cpu_crf[crbD(ctx->opcode) >> 2], t0, t1); \ 4470 tcg_temp_free_i32(t0); \ 4471 tcg_temp_free_i32(t1); \ 4472 } 4473 4474 /* crand */ 4475 GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08); 4476 /* crandc */ 4477 GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04); 4478 /* creqv */ 4479 GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09); 4480 /* crnand */ 4481 GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07); 4482 /* crnor */ 4483 GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01); 4484 /* cror */ 4485 GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E); 4486 /* crorc */ 4487 GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D); 4488 /* crxor */ 4489 GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06); 4490 4491 /* mcrf */ 4492 static void gen_mcrf(DisasContext *ctx) 4493 { 4494 tcg_gen_mov_i32(cpu_crf[crfD(ctx->opcode)], cpu_crf[crfS(ctx->opcode)]); 4495 } 4496 4497 /*** System linkage ***/ 4498 4499 /* rfi (supervisor only) */ 4500 static void gen_rfi(DisasContext *ctx) 4501 { 4502 #if defined(CONFIG_USER_ONLY) 4503 GEN_PRIV; 4504 #else 4505 /* 4506 * This instruction doesn't exist anymore on 64-bit server 4507 * processors compliant with arch 2.x 4508 */ 4509 if (is_book3s_arch2x(ctx)) { 4510 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 4511 return; 4512 } 4513 /* Restore CPU state */ 4514 CHK_SV; 4515 gen_icount_io_start(ctx); 4516 gen_update_cfar(ctx, ctx->cia); 4517 gen_helper_rfi(cpu_env); 4518 ctx->base.is_jmp = DISAS_EXIT; 4519 #endif 4520 } 4521 4522 #if defined(TARGET_PPC64) 4523 static void gen_rfid(DisasContext *ctx) 4524 { 4525 #if defined(CONFIG_USER_ONLY) 4526 GEN_PRIV; 4527 #else 4528 /* Restore CPU state */ 4529 CHK_SV; 4530 gen_icount_io_start(ctx); 4531 gen_update_cfar(ctx, ctx->cia); 4532 gen_helper_rfid(cpu_env); 4533 ctx->base.is_jmp = DISAS_EXIT; 4534 #endif 4535 } 4536 4537 #if !defined(CONFIG_USER_ONLY) 4538 static void gen_rfscv(DisasContext *ctx) 4539 { 4540 #if defined(CONFIG_USER_ONLY) 4541 GEN_PRIV; 4542 #else 4543 /* Restore CPU state */ 4544 CHK_SV; 4545 gen_icount_io_start(ctx); 4546 gen_update_cfar(ctx, ctx->cia); 4547 gen_helper_rfscv(cpu_env); 4548 ctx->base.is_jmp = DISAS_EXIT; 4549 #endif 4550 } 4551 #endif 4552 4553 static void gen_hrfid(DisasContext *ctx) 4554 { 4555 #if defined(CONFIG_USER_ONLY) 4556 GEN_PRIV; 4557 #else 4558 /* Restore CPU state */ 4559 CHK_HV; 4560 gen_helper_hrfid(cpu_env); 4561 ctx->base.is_jmp = DISAS_EXIT; 4562 #endif 4563 } 4564 #endif 4565 4566 /* sc */ 4567 #if defined(CONFIG_USER_ONLY) 4568 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL_USER 4569 #else 4570 #define POWERPC_SYSCALL POWERPC_EXCP_SYSCALL 4571 #define POWERPC_SYSCALL_VECTORED POWERPC_EXCP_SYSCALL_VECTORED 4572 #endif 4573 static void gen_sc(DisasContext *ctx) 4574 { 4575 uint32_t lev; 4576 4577 lev = (ctx->opcode >> 5) & 0x7F; 4578 gen_exception_err(ctx, POWERPC_SYSCALL, lev); 4579 } 4580 4581 #if defined(TARGET_PPC64) 4582 #if !defined(CONFIG_USER_ONLY) 4583 static void gen_scv(DisasContext *ctx) 4584 { 4585 uint32_t lev = (ctx->opcode >> 5) & 0x7F; 4586 4587 /* Set the PC back to the faulting instruction. 
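     * (The syscall-vectored exception code is then expected to derive
     * the return address from nip, so nip has to point at the scv
     * instruction itself here rather than at the next instruction.)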
*/ 4588 gen_update_nip(ctx, ctx->cia); 4589 gen_helper_scv(cpu_env, tcg_constant_i32(lev)); 4590 4591 ctx->base.is_jmp = DISAS_NORETURN; 4592 } 4593 #endif 4594 #endif 4595 4596 /*** Trap ***/ 4597 4598 /* Check for unconditional traps (always or never) */ 4599 static bool check_unconditional_trap(DisasContext *ctx) 4600 { 4601 /* Trap never */ 4602 if (TO(ctx->opcode) == 0) { 4603 return true; 4604 } 4605 /* Trap always */ 4606 if (TO(ctx->opcode) == 31) { 4607 gen_exception_err(ctx, POWERPC_EXCP_PROGRAM, POWERPC_EXCP_TRAP); 4608 return true; 4609 } 4610 return false; 4611 } 4612 4613 /* tw */ 4614 static void gen_tw(DisasContext *ctx) 4615 { 4616 TCGv_i32 t0; 4617 4618 if (check_unconditional_trap(ctx)) { 4619 return; 4620 } 4621 t0 = tcg_const_i32(TO(ctx->opcode)); 4622 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4623 t0); 4624 tcg_temp_free_i32(t0); 4625 } 4626 4627 /* twi */ 4628 static void gen_twi(DisasContext *ctx) 4629 { 4630 TCGv t0; 4631 TCGv_i32 t1; 4632 4633 if (check_unconditional_trap(ctx)) { 4634 return; 4635 } 4636 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4637 t1 = tcg_const_i32(TO(ctx->opcode)); 4638 gen_helper_tw(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4639 tcg_temp_free(t0); 4640 tcg_temp_free_i32(t1); 4641 } 4642 4643 #if defined(TARGET_PPC64) 4644 /* td */ 4645 static void gen_td(DisasContext *ctx) 4646 { 4647 TCGv_i32 t0; 4648 4649 if (check_unconditional_trap(ctx)) { 4650 return; 4651 } 4652 t0 = tcg_const_i32(TO(ctx->opcode)); 4653 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 4654 t0); 4655 tcg_temp_free_i32(t0); 4656 } 4657 4658 /* tdi */ 4659 static void gen_tdi(DisasContext *ctx) 4660 { 4661 TCGv t0; 4662 TCGv_i32 t1; 4663 4664 if (check_unconditional_trap(ctx)) { 4665 return; 4666 } 4667 t0 = tcg_const_tl(SIMM(ctx->opcode)); 4668 t1 = tcg_const_i32(TO(ctx->opcode)); 4669 gen_helper_td(cpu_env, cpu_gpr[rA(ctx->opcode)], t0, t1); 4670 tcg_temp_free(t0); 4671 tcg_temp_free_i32(t1); 4672 } 4673 #endif 4674 4675 /*** Processor control ***/ 4676 4677 /* mcrxr */ 4678 static void gen_mcrxr(DisasContext *ctx) 4679 { 4680 TCGv_i32 t0 = tcg_temp_new_i32(); 4681 TCGv_i32 t1 = tcg_temp_new_i32(); 4682 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4683 4684 tcg_gen_trunc_tl_i32(t0, cpu_so); 4685 tcg_gen_trunc_tl_i32(t1, cpu_ov); 4686 tcg_gen_trunc_tl_i32(dst, cpu_ca); 4687 tcg_gen_shli_i32(t0, t0, 3); 4688 tcg_gen_shli_i32(t1, t1, 2); 4689 tcg_gen_shli_i32(dst, dst, 1); 4690 tcg_gen_or_i32(dst, dst, t0); 4691 tcg_gen_or_i32(dst, dst, t1); 4692 tcg_temp_free_i32(t0); 4693 tcg_temp_free_i32(t1); 4694 4695 tcg_gen_movi_tl(cpu_so, 0); 4696 tcg_gen_movi_tl(cpu_ov, 0); 4697 tcg_gen_movi_tl(cpu_ca, 0); 4698 } 4699 4700 #ifdef TARGET_PPC64 4701 /* mcrxrx */ 4702 static void gen_mcrxrx(DisasContext *ctx) 4703 { 4704 TCGv t0 = tcg_temp_new(); 4705 TCGv t1 = tcg_temp_new(); 4706 TCGv_i32 dst = cpu_crf[crfD(ctx->opcode)]; 4707 4708 /* copy OV and OV32 */ 4709 tcg_gen_shli_tl(t0, cpu_ov, 1); 4710 tcg_gen_or_tl(t0, t0, cpu_ov32); 4711 tcg_gen_shli_tl(t0, t0, 2); 4712 /* copy CA and CA32 */ 4713 tcg_gen_shli_tl(t1, cpu_ca, 1); 4714 tcg_gen_or_tl(t1, t1, cpu_ca32); 4715 tcg_gen_or_tl(t0, t0, t1); 4716 tcg_gen_trunc_tl_i32(dst, t0); 4717 tcg_temp_free(t0); 4718 tcg_temp_free(t1); 4719 } 4720 #endif 4721 4722 /* mfcr mfocrf */ 4723 static void gen_mfcr(DisasContext *ctx) 4724 { 4725 uint32_t crm, crn; 4726 4727 if (likely(ctx->opcode & 0x00100000)) { 4728 crm = CRM(ctx->opcode); 4729 if (likely(crm && ((crm & (crm - 1)) == 0))) { 
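            /*
             * mfocrf with a single CRM bit set: crn = ctz32(crm) below
             * identifies the selected field, and its four bits are
             * copied into the destination GPR at bit offset crn * 4.
             * The power-of-two test above is what guarantees that only
             * one field was requested.
             */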
4730 crn = ctz32(crm); 4731 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], cpu_crf[7 - crn]); 4732 tcg_gen_shli_tl(cpu_gpr[rD(ctx->opcode)], 4733 cpu_gpr[rD(ctx->opcode)], crn * 4); 4734 } 4735 } else { 4736 TCGv_i32 t0 = tcg_temp_new_i32(); 4737 tcg_gen_mov_i32(t0, cpu_crf[0]); 4738 tcg_gen_shli_i32(t0, t0, 4); 4739 tcg_gen_or_i32(t0, t0, cpu_crf[1]); 4740 tcg_gen_shli_i32(t0, t0, 4); 4741 tcg_gen_or_i32(t0, t0, cpu_crf[2]); 4742 tcg_gen_shli_i32(t0, t0, 4); 4743 tcg_gen_or_i32(t0, t0, cpu_crf[3]); 4744 tcg_gen_shli_i32(t0, t0, 4); 4745 tcg_gen_or_i32(t0, t0, cpu_crf[4]); 4746 tcg_gen_shli_i32(t0, t0, 4); 4747 tcg_gen_or_i32(t0, t0, cpu_crf[5]); 4748 tcg_gen_shli_i32(t0, t0, 4); 4749 tcg_gen_or_i32(t0, t0, cpu_crf[6]); 4750 tcg_gen_shli_i32(t0, t0, 4); 4751 tcg_gen_or_i32(t0, t0, cpu_crf[7]); 4752 tcg_gen_extu_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 4753 tcg_temp_free_i32(t0); 4754 } 4755 } 4756 4757 /* mfmsr */ 4758 static void gen_mfmsr(DisasContext *ctx) 4759 { 4760 CHK_SV; 4761 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], cpu_msr); 4762 } 4763 4764 /* mfspr */ 4765 static inline void gen_op_mfspr(DisasContext *ctx) 4766 { 4767 void (*read_cb)(DisasContext *ctx, int gprn, int sprn); 4768 uint32_t sprn = SPR(ctx->opcode); 4769 4770 #if defined(CONFIG_USER_ONLY) 4771 read_cb = ctx->spr_cb[sprn].uea_read; 4772 #else 4773 if (ctx->pr) { 4774 read_cb = ctx->spr_cb[sprn].uea_read; 4775 } else if (ctx->hv) { 4776 read_cb = ctx->spr_cb[sprn].hea_read; 4777 } else { 4778 read_cb = ctx->spr_cb[sprn].oea_read; 4779 } 4780 #endif 4781 if (likely(read_cb != NULL)) { 4782 if (likely(read_cb != SPR_NOACCESS)) { 4783 (*read_cb)(ctx, rD(ctx->opcode), sprn); 4784 } else { 4785 /* Privilege exception */ 4786 /* 4787 * This is a hack to avoid warnings when running Linux: 4788 * this OS breaks the PowerPC virtualisation model, 4789 * allowing userland application to read the PVR 4790 */ 4791 if (sprn != SPR_PVR) { 4792 qemu_log_mask(LOG_GUEST_ERROR, "Trying to read privileged spr " 4793 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4794 ctx->cia); 4795 } 4796 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4797 } 4798 } else { 4799 /* ISA 2.07 defines these as no-ops */ 4800 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4801 (sprn >= 808 && sprn <= 811)) { 4802 /* This is a nop */ 4803 return; 4804 } 4805 /* Not defined */ 4806 qemu_log_mask(LOG_GUEST_ERROR, 4807 "Trying to read invalid spr %d (0x%03x) at " 4808 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4809 4810 /* 4811 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4812 * generate a priv, a hv emu or a no-op 4813 */ 4814 if (sprn & 0x10) { 4815 if (ctx->pr) { 4816 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4817 } 4818 } else { 4819 if (ctx->pr || sprn == 0 || sprn == 4 || sprn == 5 || sprn == 6) { 4820 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 4821 } 4822 } 4823 } 4824 } 4825 4826 static void gen_mfspr(DisasContext *ctx) 4827 { 4828 gen_op_mfspr(ctx); 4829 } 4830 4831 /* mftb */ 4832 static void gen_mftb(DisasContext *ctx) 4833 { 4834 gen_op_mfspr(ctx); 4835 } 4836 4837 /* mtcrf mtocrf*/ 4838 static void gen_mtcrf(DisasContext *ctx) 4839 { 4840 uint32_t crm, crn; 4841 4842 crm = CRM(ctx->opcode); 4843 if (likely((ctx->opcode & 0x00100000))) { 4844 if (crm && ((crm & (crm - 1)) == 0)) { 4845 TCGv_i32 temp = tcg_temp_new_i32(); 4846 crn = ctz32(crm); 4847 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4848 tcg_gen_shri_i32(temp, temp, crn * 4); 4849 tcg_gen_andi_i32(cpu_crf[7 - crn], temp, 0xf); 4850 tcg_temp_free_i32(temp); 
4851 } 4852 } else { 4853 TCGv_i32 temp = tcg_temp_new_i32(); 4854 tcg_gen_trunc_tl_i32(temp, cpu_gpr[rS(ctx->opcode)]); 4855 for (crn = 0 ; crn < 8 ; crn++) { 4856 if (crm & (1 << crn)) { 4857 tcg_gen_shri_i32(cpu_crf[7 - crn], temp, crn * 4); 4858 tcg_gen_andi_i32(cpu_crf[7 - crn], cpu_crf[7 - crn], 0xf); 4859 } 4860 } 4861 tcg_temp_free_i32(temp); 4862 } 4863 } 4864 4865 /* mtmsr */ 4866 #if defined(TARGET_PPC64) 4867 static void gen_mtmsrd(DisasContext *ctx) 4868 { 4869 if (unlikely(!is_book3s_arch2x(ctx))) { 4870 gen_invalid(ctx); 4871 return; 4872 } 4873 4874 CHK_SV; 4875 4876 #if !defined(CONFIG_USER_ONLY) 4877 TCGv t0, t1; 4878 target_ulong mask; 4879 4880 t0 = tcg_temp_new(); 4881 t1 = tcg_temp_new(); 4882 4883 gen_icount_io_start(ctx); 4884 4885 if (ctx->opcode & 0x00010000) { 4886 /* L=1 form only updates EE and RI */ 4887 mask = (1ULL << MSR_RI) | (1ULL << MSR_EE); 4888 } else { 4889 /* mtmsrd does not alter HV, S, ME, or LE */ 4890 mask = ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S) | 4891 (1ULL << MSR_HV)); 4892 /* 4893 * XXX: we need to update nip before the store if we enter 4894 * power saving mode, we will exit the loop directly from 4895 * ppc_store_msr 4896 */ 4897 gen_update_nip(ctx, ctx->base.pc_next); 4898 } 4899 4900 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 4901 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 4902 tcg_gen_or_tl(t0, t0, t1); 4903 4904 gen_helper_store_msr(cpu_env, t0); 4905 4906 /* Must stop the translation as machine state (may have) changed */ 4907 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4908 4909 tcg_temp_free(t0); 4910 tcg_temp_free(t1); 4911 #endif /* !defined(CONFIG_USER_ONLY) */ 4912 } 4913 #endif /* defined(TARGET_PPC64) */ 4914 4915 static void gen_mtmsr(DisasContext *ctx) 4916 { 4917 CHK_SV; 4918 4919 #if !defined(CONFIG_USER_ONLY) 4920 TCGv t0, t1; 4921 target_ulong mask = 0xFFFFFFFF; 4922 4923 t0 = tcg_temp_new(); 4924 t1 = tcg_temp_new(); 4925 4926 gen_icount_io_start(ctx); 4927 if (ctx->opcode & 0x00010000) { 4928 /* L=1 form only updates EE and RI */ 4929 mask &= (1ULL << MSR_RI) | (1ULL << MSR_EE); 4930 } else { 4931 /* mtmsr does not alter S, ME, or LE */ 4932 mask &= ~((1ULL << MSR_LE) | (1ULL << MSR_ME) | (1ULL << MSR_S)); 4933 4934 /* 4935 * XXX: we need to update nip before the store if we enter 4936 * power saving mode, we will exit the loop directly from 4937 * ppc_store_msr 4938 */ 4939 gen_update_nip(ctx, ctx->base.pc_next); 4940 } 4941 4942 tcg_gen_andi_tl(t0, cpu_gpr[rS(ctx->opcode)], mask); 4943 tcg_gen_andi_tl(t1, cpu_msr, ~mask); 4944 tcg_gen_or_tl(t0, t0, t1); 4945 4946 gen_helper_store_msr(cpu_env, t0); 4947 4948 /* Must stop the translation as machine state (may have) changed */ 4949 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 4950 4951 tcg_temp_free(t0); 4952 tcg_temp_free(t1); 4953 #endif 4954 } 4955 4956 /* mtspr */ 4957 static void gen_mtspr(DisasContext *ctx) 4958 { 4959 void (*write_cb)(DisasContext *ctx, int sprn, int gprn); 4960 uint32_t sprn = SPR(ctx->opcode); 4961 4962 #if defined(CONFIG_USER_ONLY) 4963 write_cb = ctx->spr_cb[sprn].uea_write; 4964 #else 4965 if (ctx->pr) { 4966 write_cb = ctx->spr_cb[sprn].uea_write; 4967 } else if (ctx->hv) { 4968 write_cb = ctx->spr_cb[sprn].hea_write; 4969 } else { 4970 write_cb = ctx->spr_cb[sprn].oea_write; 4971 } 4972 #endif 4973 if (likely(write_cb != NULL)) { 4974 if (likely(write_cb != SPR_NOACCESS)) { 4975 (*write_cb)(ctx, sprn, rS(ctx->opcode)); 4976 } else { 4977 /* Privilege exception */ 4978 qemu_log_mask(LOG_GUEST_ERROR, "Trying to write privileged spr " 
4979 "%d (0x%03x) at " TARGET_FMT_lx "\n", sprn, sprn, 4980 ctx->cia); 4981 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_REG); 4982 } 4983 } else { 4984 /* ISA 2.07 defines these as no-ops */ 4985 if ((ctx->insns_flags2 & PPC2_ISA207S) && 4986 (sprn >= 808 && sprn <= 811)) { 4987 /* This is a nop */ 4988 return; 4989 } 4990 4991 /* Not defined */ 4992 qemu_log_mask(LOG_GUEST_ERROR, 4993 "Trying to write invalid spr %d (0x%03x) at " 4994 TARGET_FMT_lx "\n", sprn, sprn, ctx->cia); 4995 4996 4997 /* 4998 * The behaviour depends on MSR:PR and SPR# bit 0x10, it can 4999 * generate a priv, a hv emu or a no-op 5000 */ 5001 if (sprn & 0x10) { 5002 if (ctx->pr) { 5003 gen_priv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5004 } 5005 } else { 5006 if (ctx->pr || sprn == 0) { 5007 gen_hvpriv_exception(ctx, POWERPC_EXCP_INVAL_SPR); 5008 } 5009 } 5010 } 5011 } 5012 5013 #if defined(TARGET_PPC64) 5014 /* setb */ 5015 static void gen_setb(DisasContext *ctx) 5016 { 5017 TCGv_i32 t0 = tcg_temp_new_i32(); 5018 TCGv_i32 t8 = tcg_constant_i32(8); 5019 TCGv_i32 tm1 = tcg_constant_i32(-1); 5020 int crf = crfS(ctx->opcode); 5021 5022 tcg_gen_setcondi_i32(TCG_COND_GEU, t0, cpu_crf[crf], 4); 5023 tcg_gen_movcond_i32(TCG_COND_GEU, t0, cpu_crf[crf], t8, tm1, t0); 5024 tcg_gen_ext_i32_tl(cpu_gpr[rD(ctx->opcode)], t0); 5025 5026 tcg_temp_free_i32(t0); 5027 } 5028 #endif 5029 5030 /*** Cache management ***/ 5031 5032 /* dcbf */ 5033 static void gen_dcbf(DisasContext *ctx) 5034 { 5035 /* XXX: specification says this is treated as a load by the MMU */ 5036 TCGv t0; 5037 gen_set_access_type(ctx, ACCESS_CACHE); 5038 t0 = tcg_temp_new(); 5039 gen_addr_reg_index(ctx, t0); 5040 gen_qemu_ld8u(ctx, t0, t0); 5041 tcg_temp_free(t0); 5042 } 5043 5044 /* dcbfep (external PID dcbf) */ 5045 static void gen_dcbfep(DisasContext *ctx) 5046 { 5047 /* XXX: specification says this is treated as a load by the MMU */ 5048 TCGv t0; 5049 CHK_SV; 5050 gen_set_access_type(ctx, ACCESS_CACHE); 5051 t0 = tcg_temp_new(); 5052 gen_addr_reg_index(ctx, t0); 5053 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5054 tcg_temp_free(t0); 5055 } 5056 5057 /* dcbi (Supervisor only) */ 5058 static void gen_dcbi(DisasContext *ctx) 5059 { 5060 #if defined(CONFIG_USER_ONLY) 5061 GEN_PRIV; 5062 #else 5063 TCGv EA, val; 5064 5065 CHK_SV; 5066 EA = tcg_temp_new(); 5067 gen_set_access_type(ctx, ACCESS_CACHE); 5068 gen_addr_reg_index(ctx, EA); 5069 val = tcg_temp_new(); 5070 /* XXX: specification says this should be treated as a store by the MMU */ 5071 gen_qemu_ld8u(ctx, val, EA); 5072 gen_qemu_st8(ctx, val, EA); 5073 tcg_temp_free(val); 5074 tcg_temp_free(EA); 5075 #endif /* defined(CONFIG_USER_ONLY) */ 5076 } 5077 5078 /* dcdst */ 5079 static void gen_dcbst(DisasContext *ctx) 5080 { 5081 /* XXX: specification say this is treated as a load by the MMU */ 5082 TCGv t0; 5083 gen_set_access_type(ctx, ACCESS_CACHE); 5084 t0 = tcg_temp_new(); 5085 gen_addr_reg_index(ctx, t0); 5086 gen_qemu_ld8u(ctx, t0, t0); 5087 tcg_temp_free(t0); 5088 } 5089 5090 /* dcbstep (dcbstep External PID version) */ 5091 static void gen_dcbstep(DisasContext *ctx) 5092 { 5093 /* XXX: specification say this is treated as a load by the MMU */ 5094 TCGv t0; 5095 gen_set_access_type(ctx, ACCESS_CACHE); 5096 t0 = tcg_temp_new(); 5097 gen_addr_reg_index(ctx, t0); 5098 tcg_gen_qemu_ld_tl(t0, t0, PPC_TLB_EPID_LOAD, DEF_MEMOP(MO_UB)); 5099 tcg_temp_free(t0); 5100 } 5101 5102 /* dcbt */ 5103 static void gen_dcbt(DisasContext *ctx) 5104 { 5105 /* 5106 * interpreted as no-op 5107 * XXX: 
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* dcbtep */
static void gen_dcbtep(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* dcbtst */
static void gen_dcbtst(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* dcbtstep */
static void gen_dcbtstep(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a load by the MMU but
     *      does not generate any exception
     */
}

/* dcbtls */
static void gen_dcbtls(DisasContext *ctx)
{
    /* Always fails locking the cache */
    TCGv t0 = tcg_temp_new();
    gen_load_spr(t0, SPR_Exxx_L1CSR0);
    tcg_gen_ori_tl(t0, t0, L1CSR0_CUL);
    gen_store_spr(SPR_Exxx_L1CSR0, t0);
    tcg_temp_free(t0);
}

/* dcbz */
static void gen_dcbz(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbz(cpu_env, tcgv_addr, tcgv_op);
    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_op);
}

/* dcbzep */
static void gen_dcbzep(DisasContext *ctx)
{
    TCGv tcgv_addr;
    TCGv_i32 tcgv_op;

    gen_set_access_type(ctx, ACCESS_CACHE);
    tcgv_addr = tcg_temp_new();
    tcgv_op = tcg_const_i32(ctx->opcode & 0x03FF000);
    gen_addr_reg_index(ctx, tcgv_addr);
    gen_helper_dcbzep(cpu_env, tcgv_addr, tcgv_op);
    tcg_temp_free(tcgv_addr);
    tcg_temp_free_i32(tcgv_op);
}

/* dst / dstt */
static void gen_dst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }
}

/* dstst / dststt */
static void gen_dstst(DisasContext *ctx)
{
    if (rA(ctx->opcode) == 0) {
        gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
    } else {
        /* interpreted as no-op */
    }
}

/* dss / dssall */
static void gen_dss(DisasContext *ctx)
{
    /* interpreted as no-op */
}

/* icbi */
static void gen_icbi(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbi(cpu_env, t0);
    tcg_temp_free(t0);
}

/* icbiep */
static void gen_icbiep(DisasContext *ctx)
{
    TCGv t0;
    gen_set_access_type(ctx, ACCESS_CACHE);
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_icbiep(cpu_env, t0);
    tcg_temp_free(t0);
}

/* Optional: */
/* dcba */
static void gen_dcba(DisasContext *ctx)
{
    /*
     * interpreted as no-op
     * XXX: specification says this is treated as a store by the MMU
     *      but does not generate any exception
     */
}

/*** Segment register manipulation ***/
/* Supervisor only: */

/* mfsr */
static void gen_mfsr(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
5252 TCGv t0; 5253 5254 CHK_SV; 5255 t0 = tcg_const_tl(SR(ctx->opcode)); 5256 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5257 tcg_temp_free(t0); 5258 #endif /* defined(CONFIG_USER_ONLY) */ 5259 } 5260 5261 /* mfsrin */ 5262 static void gen_mfsrin(DisasContext *ctx) 5263 { 5264 #if defined(CONFIG_USER_ONLY) 5265 GEN_PRIV; 5266 #else 5267 TCGv t0; 5268 5269 CHK_SV; 5270 t0 = tcg_temp_new(); 5271 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5272 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5273 tcg_temp_free(t0); 5274 #endif /* defined(CONFIG_USER_ONLY) */ 5275 } 5276 5277 /* mtsr */ 5278 static void gen_mtsr(DisasContext *ctx) 5279 { 5280 #if defined(CONFIG_USER_ONLY) 5281 GEN_PRIV; 5282 #else 5283 TCGv t0; 5284 5285 CHK_SV; 5286 t0 = tcg_const_tl(SR(ctx->opcode)); 5287 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5288 tcg_temp_free(t0); 5289 #endif /* defined(CONFIG_USER_ONLY) */ 5290 } 5291 5292 /* mtsrin */ 5293 static void gen_mtsrin(DisasContext *ctx) 5294 { 5295 #if defined(CONFIG_USER_ONLY) 5296 GEN_PRIV; 5297 #else 5298 TCGv t0; 5299 CHK_SV; 5300 5301 t0 = tcg_temp_new(); 5302 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5303 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rD(ctx->opcode)]); 5304 tcg_temp_free(t0); 5305 #endif /* defined(CONFIG_USER_ONLY) */ 5306 } 5307 5308 #if defined(TARGET_PPC64) 5309 /* Specific implementation for PowerPC 64 "bridge" emulation using SLB */ 5310 5311 /* mfsr */ 5312 static void gen_mfsr_64b(DisasContext *ctx) 5313 { 5314 #if defined(CONFIG_USER_ONLY) 5315 GEN_PRIV; 5316 #else 5317 TCGv t0; 5318 5319 CHK_SV; 5320 t0 = tcg_const_tl(SR(ctx->opcode)); 5321 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5322 tcg_temp_free(t0); 5323 #endif /* defined(CONFIG_USER_ONLY) */ 5324 } 5325 5326 /* mfsrin */ 5327 static void gen_mfsrin_64b(DisasContext *ctx) 5328 { 5329 #if defined(CONFIG_USER_ONLY) 5330 GEN_PRIV; 5331 #else 5332 TCGv t0; 5333 5334 CHK_SV; 5335 t0 = tcg_temp_new(); 5336 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5337 gen_helper_load_sr(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5338 tcg_temp_free(t0); 5339 #endif /* defined(CONFIG_USER_ONLY) */ 5340 } 5341 5342 /* mtsr */ 5343 static void gen_mtsr_64b(DisasContext *ctx) 5344 { 5345 #if defined(CONFIG_USER_ONLY) 5346 GEN_PRIV; 5347 #else 5348 TCGv t0; 5349 5350 CHK_SV; 5351 t0 = tcg_const_tl(SR(ctx->opcode)); 5352 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5353 tcg_temp_free(t0); 5354 #endif /* defined(CONFIG_USER_ONLY) */ 5355 } 5356 5357 /* mtsrin */ 5358 static void gen_mtsrin_64b(DisasContext *ctx) 5359 { 5360 #if defined(CONFIG_USER_ONLY) 5361 GEN_PRIV; 5362 #else 5363 TCGv t0; 5364 5365 CHK_SV; 5366 t0 = tcg_temp_new(); 5367 tcg_gen_extract_tl(t0, cpu_gpr[rB(ctx->opcode)], 28, 4); 5368 gen_helper_store_sr(cpu_env, t0, cpu_gpr[rS(ctx->opcode)]); 5369 tcg_temp_free(t0); 5370 #endif /* defined(CONFIG_USER_ONLY) */ 5371 } 5372 5373 /* slbmte */ 5374 static void gen_slbmte(DisasContext *ctx) 5375 { 5376 #if defined(CONFIG_USER_ONLY) 5377 GEN_PRIV; 5378 #else 5379 CHK_SV; 5380 5381 gen_helper_store_slb(cpu_env, cpu_gpr[rB(ctx->opcode)], 5382 cpu_gpr[rS(ctx->opcode)]); 5383 #endif /* defined(CONFIG_USER_ONLY) */ 5384 } 5385 5386 static void gen_slbmfee(DisasContext *ctx) 5387 { 5388 #if defined(CONFIG_USER_ONLY) 5389 GEN_PRIV; 5390 #else 5391 CHK_SV; 5392 5393 gen_helper_load_slb_esid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5394 cpu_gpr[rB(ctx->opcode)]); 5395 #endif /* defined(CONFIG_USER_ONLY) */ 
5396 } 5397 5398 static void gen_slbmfev(DisasContext *ctx) 5399 { 5400 #if defined(CONFIG_USER_ONLY) 5401 GEN_PRIV; 5402 #else 5403 CHK_SV; 5404 5405 gen_helper_load_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5406 cpu_gpr[rB(ctx->opcode)]); 5407 #endif /* defined(CONFIG_USER_ONLY) */ 5408 } 5409 5410 static void gen_slbfee_(DisasContext *ctx) 5411 { 5412 #if defined(CONFIG_USER_ONLY) 5413 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5414 #else 5415 TCGLabel *l1, *l2; 5416 5417 if (unlikely(ctx->pr)) { 5418 gen_inval_exception(ctx, POWERPC_EXCP_PRIV_REG); 5419 return; 5420 } 5421 gen_helper_find_slb_vsid(cpu_gpr[rS(ctx->opcode)], cpu_env, 5422 cpu_gpr[rB(ctx->opcode)]); 5423 l1 = gen_new_label(); 5424 l2 = gen_new_label(); 5425 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 5426 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rS(ctx->opcode)], -1, l1); 5427 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], CRF_EQ); 5428 tcg_gen_br(l2); 5429 gen_set_label(l1); 5430 tcg_gen_movi_tl(cpu_gpr[rS(ctx->opcode)], 0); 5431 gen_set_label(l2); 5432 #endif 5433 } 5434 #endif /* defined(TARGET_PPC64) */ 5435 5436 /*** Lookaside buffer management ***/ 5437 /* Optional & supervisor only: */ 5438 5439 /* tlbia */ 5440 static void gen_tlbia(DisasContext *ctx) 5441 { 5442 #if defined(CONFIG_USER_ONLY) 5443 GEN_PRIV; 5444 #else 5445 CHK_HV; 5446 5447 gen_helper_tlbia(cpu_env); 5448 #endif /* defined(CONFIG_USER_ONLY) */ 5449 } 5450 5451 /* tlbiel */ 5452 static void gen_tlbiel(DisasContext *ctx) 5453 { 5454 #if defined(CONFIG_USER_ONLY) 5455 GEN_PRIV; 5456 #else 5457 bool psr = (ctx->opcode >> 17) & 0x1; 5458 5459 if (ctx->pr || (!ctx->hv && !psr && ctx->hr)) { 5460 /* 5461 * tlbiel is privileged except when PSR=0 and HR=1, making it 5462 * hypervisor privileged. 5463 */ 5464 GEN_PRIV; 5465 } 5466 5467 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5468 #endif /* defined(CONFIG_USER_ONLY) */ 5469 } 5470 5471 /* tlbie */ 5472 static void gen_tlbie(DisasContext *ctx) 5473 { 5474 #if defined(CONFIG_USER_ONLY) 5475 GEN_PRIV; 5476 #else 5477 bool psr = (ctx->opcode >> 17) & 0x1; 5478 TCGv_i32 t1; 5479 5480 if (ctx->pr) { 5481 /* tlbie is privileged... */ 5482 GEN_PRIV; 5483 } else if (!ctx->hv) { 5484 if (!ctx->gtse || (!psr && ctx->hr)) { 5485 /* 5486 * ... except when GTSE=0 or when PSR=0 and HR=1, making it 5487 * hypervisor privileged. 
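             *
             * (Note that, independently of the privilege check, the code
             * below also sets TLB_NEED_GLOBAL_FLUSH in env->tlb_need_flush,
             * so the broadcast flush itself is deferred to a later
             * tlbsync/ptesync via gen_check_tlb_flush().)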
5488 */ 5489 GEN_PRIV; 5490 } 5491 } 5492 5493 if (NARROW_MODE(ctx)) { 5494 TCGv t0 = tcg_temp_new(); 5495 tcg_gen_ext32u_tl(t0, cpu_gpr[rB(ctx->opcode)]); 5496 gen_helper_tlbie(cpu_env, t0); 5497 tcg_temp_free(t0); 5498 } else { 5499 gen_helper_tlbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5500 } 5501 t1 = tcg_temp_new_i32(); 5502 tcg_gen_ld_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5503 tcg_gen_ori_i32(t1, t1, TLB_NEED_GLOBAL_FLUSH); 5504 tcg_gen_st_i32(t1, cpu_env, offsetof(CPUPPCState, tlb_need_flush)); 5505 tcg_temp_free_i32(t1); 5506 #endif /* defined(CONFIG_USER_ONLY) */ 5507 } 5508 5509 /* tlbsync */ 5510 static void gen_tlbsync(DisasContext *ctx) 5511 { 5512 #if defined(CONFIG_USER_ONLY) 5513 GEN_PRIV; 5514 #else 5515 5516 if (ctx->gtse) { 5517 CHK_SV; /* If gtse is set then tlbsync is supervisor privileged */ 5518 } else { 5519 CHK_HV; /* Else hypervisor privileged */ 5520 } 5521 5522 /* BookS does both ptesync and tlbsync make tlbsync a nop for server */ 5523 if (ctx->insns_flags & PPC_BOOKE) { 5524 gen_check_tlb_flush(ctx, true); 5525 } 5526 #endif /* defined(CONFIG_USER_ONLY) */ 5527 } 5528 5529 #if defined(TARGET_PPC64) 5530 /* slbia */ 5531 static void gen_slbia(DisasContext *ctx) 5532 { 5533 #if defined(CONFIG_USER_ONLY) 5534 GEN_PRIV; 5535 #else 5536 uint32_t ih = (ctx->opcode >> 21) & 0x7; 5537 TCGv_i32 t0 = tcg_const_i32(ih); 5538 5539 CHK_SV; 5540 5541 gen_helper_slbia(cpu_env, t0); 5542 tcg_temp_free_i32(t0); 5543 #endif /* defined(CONFIG_USER_ONLY) */ 5544 } 5545 5546 /* slbie */ 5547 static void gen_slbie(DisasContext *ctx) 5548 { 5549 #if defined(CONFIG_USER_ONLY) 5550 GEN_PRIV; 5551 #else 5552 CHK_SV; 5553 5554 gen_helper_slbie(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5555 #endif /* defined(CONFIG_USER_ONLY) */ 5556 } 5557 5558 /* slbieg */ 5559 static void gen_slbieg(DisasContext *ctx) 5560 { 5561 #if defined(CONFIG_USER_ONLY) 5562 GEN_PRIV; 5563 #else 5564 CHK_SV; 5565 5566 gen_helper_slbieg(cpu_env, cpu_gpr[rB(ctx->opcode)]); 5567 #endif /* defined(CONFIG_USER_ONLY) */ 5568 } 5569 5570 /* slbsync */ 5571 static void gen_slbsync(DisasContext *ctx) 5572 { 5573 #if defined(CONFIG_USER_ONLY) 5574 GEN_PRIV; 5575 #else 5576 CHK_SV; 5577 gen_check_tlb_flush(ctx, true); 5578 #endif /* defined(CONFIG_USER_ONLY) */ 5579 } 5580 5581 #endif /* defined(TARGET_PPC64) */ 5582 5583 /*** External control ***/ 5584 /* Optional: */ 5585 5586 /* eciwx */ 5587 static void gen_eciwx(DisasContext *ctx) 5588 { 5589 TCGv t0; 5590 /* Should check EAR[E] ! */ 5591 gen_set_access_type(ctx, ACCESS_EXT); 5592 t0 = tcg_temp_new(); 5593 gen_addr_reg_index(ctx, t0); 5594 tcg_gen_qemu_ld_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5595 DEF_MEMOP(MO_UL | MO_ALIGN)); 5596 tcg_temp_free(t0); 5597 } 5598 5599 /* ecowx */ 5600 static void gen_ecowx(DisasContext *ctx) 5601 { 5602 TCGv t0; 5603 /* Should check EAR[E] ! */ 5604 gen_set_access_type(ctx, ACCESS_EXT); 5605 t0 = tcg_temp_new(); 5606 gen_addr_reg_index(ctx, t0); 5607 tcg_gen_qemu_st_tl(cpu_gpr[rD(ctx->opcode)], t0, ctx->mem_idx, 5608 DEF_MEMOP(MO_UL | MO_ALIGN)); 5609 tcg_temp_free(t0); 5610 } 5611 5612 /* PowerPC 601 specific instructions */ 5613 5614 /* abs - abs. */ 5615 static void gen_abs(DisasContext *ctx) 5616 { 5617 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5618 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5619 5620 tcg_gen_abs_tl(d, a); 5621 if (unlikely(Rc(ctx->opcode) != 0)) { 5622 gen_set_Rc0(ctx, d); 5623 } 5624 } 5625 5626 /* abso - abso. 
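 * As implemented below, XER[OV] (and hence SO) is set only for the one
 * input value, 0x80000000, whose 32-bit negation overflows; otherwise
 * the result is simply the absolute value.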
*/ 5627 static void gen_abso(DisasContext *ctx) 5628 { 5629 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5630 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5631 5632 tcg_gen_setcondi_tl(TCG_COND_EQ, cpu_ov, a, 0x80000000); 5633 tcg_gen_abs_tl(d, a); 5634 tcg_gen_or_tl(cpu_so, cpu_so, cpu_ov); 5635 if (unlikely(Rc(ctx->opcode) != 0)) { 5636 gen_set_Rc0(ctx, d); 5637 } 5638 } 5639 5640 /* clcs */ 5641 static void gen_clcs(DisasContext *ctx) 5642 { 5643 TCGv_i32 t0 = tcg_const_i32(rA(ctx->opcode)); 5644 gen_helper_clcs(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 5645 tcg_temp_free_i32(t0); 5646 /* Rc=1 sets CR0 to an undefined state */ 5647 } 5648 5649 /* div - div. */ 5650 static void gen_div(DisasContext *ctx) 5651 { 5652 gen_helper_div(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5653 cpu_gpr[rB(ctx->opcode)]); 5654 if (unlikely(Rc(ctx->opcode) != 0)) { 5655 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5656 } 5657 } 5658 5659 /* divo - divo. */ 5660 static void gen_divo(DisasContext *ctx) 5661 { 5662 gen_helper_divo(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5663 cpu_gpr[rB(ctx->opcode)]); 5664 if (unlikely(Rc(ctx->opcode) != 0)) { 5665 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5666 } 5667 } 5668 5669 /* divs - divs. */ 5670 static void gen_divs(DisasContext *ctx) 5671 { 5672 gen_helper_divs(cpu_gpr[rD(ctx->opcode)], cpu_env, cpu_gpr[rA(ctx->opcode)], 5673 cpu_gpr[rB(ctx->opcode)]); 5674 if (unlikely(Rc(ctx->opcode) != 0)) { 5675 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5676 } 5677 } 5678 5679 /* divso - divso. */ 5680 static void gen_divso(DisasContext *ctx) 5681 { 5682 gen_helper_divso(cpu_gpr[rD(ctx->opcode)], cpu_env, 5683 cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5684 if (unlikely(Rc(ctx->opcode) != 0)) { 5685 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5686 } 5687 } 5688 5689 /* doz - doz. */ 5690 static void gen_doz(DisasContext *ctx) 5691 { 5692 TCGLabel *l1 = gen_new_label(); 5693 TCGLabel *l2 = gen_new_label(); 5694 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5695 cpu_gpr[rA(ctx->opcode)], l1); 5696 tcg_gen_sub_tl(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], 5697 cpu_gpr[rA(ctx->opcode)]); 5698 tcg_gen_br(l2); 5699 gen_set_label(l1); 5700 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5701 gen_set_label(l2); 5702 if (unlikely(Rc(ctx->opcode) != 0)) { 5703 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5704 } 5705 } 5706 5707 /* dozo - dozo. 
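 * The overflow check below is the usual signed-subtraction test for
 * t0 = rB - rA: overflow iff rA and rB have different signs and the
 * sign of the result no longer matches rB, computed here as the sign
 * bit of (rB ^ rA) & ~(rA ^ t0).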
*/ 5708 static void gen_dozo(DisasContext *ctx) 5709 { 5710 TCGLabel *l1 = gen_new_label(); 5711 TCGLabel *l2 = gen_new_label(); 5712 TCGv t0 = tcg_temp_new(); 5713 TCGv t1 = tcg_temp_new(); 5714 TCGv t2 = tcg_temp_new(); 5715 /* Start with XER OV disabled, the most likely case */ 5716 tcg_gen_movi_tl(cpu_ov, 0); 5717 tcg_gen_brcond_tl(TCG_COND_GE, cpu_gpr[rB(ctx->opcode)], 5718 cpu_gpr[rA(ctx->opcode)], l1); 5719 tcg_gen_sub_tl(t0, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5720 tcg_gen_xor_tl(t1, cpu_gpr[rB(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5721 tcg_gen_xor_tl(t2, cpu_gpr[rA(ctx->opcode)], t0); 5722 tcg_gen_andc_tl(t1, t1, t2); 5723 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], t0); 5724 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2); 5725 tcg_gen_movi_tl(cpu_ov, 1); 5726 tcg_gen_movi_tl(cpu_so, 1); 5727 tcg_gen_br(l2); 5728 gen_set_label(l1); 5729 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5730 gen_set_label(l2); 5731 tcg_temp_free(t0); 5732 tcg_temp_free(t1); 5733 tcg_temp_free(t2); 5734 if (unlikely(Rc(ctx->opcode) != 0)) { 5735 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5736 } 5737 } 5738 5739 /* dozi */ 5740 static void gen_dozi(DisasContext *ctx) 5741 { 5742 target_long simm = SIMM(ctx->opcode); 5743 TCGLabel *l1 = gen_new_label(); 5744 TCGLabel *l2 = gen_new_label(); 5745 tcg_gen_brcondi_tl(TCG_COND_LT, cpu_gpr[rA(ctx->opcode)], simm, l1); 5746 tcg_gen_subfi_tl(cpu_gpr[rD(ctx->opcode)], simm, cpu_gpr[rA(ctx->opcode)]); 5747 tcg_gen_br(l2); 5748 gen_set_label(l1); 5749 tcg_gen_movi_tl(cpu_gpr[rD(ctx->opcode)], 0); 5750 gen_set_label(l2); 5751 if (unlikely(Rc(ctx->opcode) != 0)) { 5752 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5753 } 5754 } 5755 5756 /* lscbx - lscbx. */ 5757 static void gen_lscbx(DisasContext *ctx) 5758 { 5759 TCGv t0 = tcg_temp_new(); 5760 TCGv_i32 t1 = tcg_const_i32(rD(ctx->opcode)); 5761 TCGv_i32 t2 = tcg_const_i32(rA(ctx->opcode)); 5762 TCGv_i32 t3 = tcg_const_i32(rB(ctx->opcode)); 5763 5764 gen_addr_reg_index(ctx, t0); 5765 gen_helper_lscbx(t0, cpu_env, t0, t1, t2, t3); 5766 tcg_temp_free_i32(t1); 5767 tcg_temp_free_i32(t2); 5768 tcg_temp_free_i32(t3); 5769 tcg_gen_andi_tl(cpu_xer, cpu_xer, ~0x7F); 5770 tcg_gen_or_tl(cpu_xer, cpu_xer, t0); 5771 if (unlikely(Rc(ctx->opcode) != 0)) { 5772 gen_set_Rc0(ctx, t0); 5773 } 5774 tcg_temp_free(t0); 5775 } 5776 5777 /* maskg - maskg. */ 5778 static void gen_maskg(DisasContext *ctx) 5779 { 5780 TCGLabel *l1 = gen_new_label(); 5781 TCGv t0 = tcg_temp_new(); 5782 TCGv t1 = tcg_temp_new(); 5783 TCGv t2 = tcg_temp_new(); 5784 TCGv t3 = tcg_temp_new(); 5785 tcg_gen_movi_tl(t3, 0xFFFFFFFF); 5786 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5787 tcg_gen_andi_tl(t1, cpu_gpr[rS(ctx->opcode)], 0x1F); 5788 tcg_gen_addi_tl(t2, t0, 1); 5789 tcg_gen_shr_tl(t2, t3, t2); 5790 tcg_gen_shr_tl(t3, t3, t1); 5791 tcg_gen_xor_tl(cpu_gpr[rA(ctx->opcode)], t2, t3); 5792 tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1); 5793 tcg_gen_neg_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]); 5794 gen_set_label(l1); 5795 tcg_temp_free(t0); 5796 tcg_temp_free(t1); 5797 tcg_temp_free(t2); 5798 tcg_temp_free(t3); 5799 if (unlikely(Rc(ctx->opcode) != 0)) { 5800 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5801 } 5802 } 5803 5804 /* maskir - maskir. 
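 * Implemented below as rA = (rS & rB) | (rA & ~rB), i.e. the bits of
 * rS selected by the mask in rB are inserted into rA.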
*/ 5805 static void gen_maskir(DisasContext *ctx) 5806 { 5807 TCGv t0 = tcg_temp_new(); 5808 TCGv t1 = tcg_temp_new(); 5809 tcg_gen_and_tl(t0, cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5810 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 5811 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5812 tcg_temp_free(t0); 5813 tcg_temp_free(t1); 5814 if (unlikely(Rc(ctx->opcode) != 0)) { 5815 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5816 } 5817 } 5818 5819 /* mul - mul. */ 5820 static void gen_mul(DisasContext *ctx) 5821 { 5822 TCGv_i64 t0 = tcg_temp_new_i64(); 5823 TCGv_i64 t1 = tcg_temp_new_i64(); 5824 TCGv t2 = tcg_temp_new(); 5825 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5826 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5827 tcg_gen_mul_i64(t0, t0, t1); 5828 tcg_gen_trunc_i64_tl(t2, t0); 5829 gen_store_spr(SPR_MQ, t2); 5830 tcg_gen_shri_i64(t1, t0, 32); 5831 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5832 tcg_temp_free_i64(t0); 5833 tcg_temp_free_i64(t1); 5834 tcg_temp_free(t2); 5835 if (unlikely(Rc(ctx->opcode) != 0)) { 5836 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5837 } 5838 } 5839 5840 /* mulo - mulo. */ 5841 static void gen_mulo(DisasContext *ctx) 5842 { 5843 TCGLabel *l1 = gen_new_label(); 5844 TCGv_i64 t0 = tcg_temp_new_i64(); 5845 TCGv_i64 t1 = tcg_temp_new_i64(); 5846 TCGv t2 = tcg_temp_new(); 5847 /* Start with XER OV disabled, the most likely case */ 5848 tcg_gen_movi_tl(cpu_ov, 0); 5849 tcg_gen_extu_tl_i64(t0, cpu_gpr[rA(ctx->opcode)]); 5850 tcg_gen_extu_tl_i64(t1, cpu_gpr[rB(ctx->opcode)]); 5851 tcg_gen_mul_i64(t0, t0, t1); 5852 tcg_gen_trunc_i64_tl(t2, t0); 5853 gen_store_spr(SPR_MQ, t2); 5854 tcg_gen_shri_i64(t1, t0, 32); 5855 tcg_gen_trunc_i64_tl(cpu_gpr[rD(ctx->opcode)], t1); 5856 tcg_gen_ext32s_i64(t1, t0); 5857 tcg_gen_brcond_i64(TCG_COND_EQ, t0, t1, l1); 5858 tcg_gen_movi_tl(cpu_ov, 1); 5859 tcg_gen_movi_tl(cpu_so, 1); 5860 gen_set_label(l1); 5861 tcg_temp_free_i64(t0); 5862 tcg_temp_free_i64(t1); 5863 tcg_temp_free(t2); 5864 if (unlikely(Rc(ctx->opcode) != 0)) { 5865 gen_set_Rc0(ctx, cpu_gpr[rD(ctx->opcode)]); 5866 } 5867 } 5868 5869 /* nabs - nabs. */ 5870 static void gen_nabs(DisasContext *ctx) 5871 { 5872 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5873 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5874 5875 tcg_gen_abs_tl(d, a); 5876 tcg_gen_neg_tl(d, d); 5877 if (unlikely(Rc(ctx->opcode) != 0)) { 5878 gen_set_Rc0(ctx, d); 5879 } 5880 } 5881 5882 /* nabso - nabso. */ 5883 static void gen_nabso(DisasContext *ctx) 5884 { 5885 TCGv d = cpu_gpr[rD(ctx->opcode)]; 5886 TCGv a = cpu_gpr[rA(ctx->opcode)]; 5887 5888 tcg_gen_abs_tl(d, a); 5889 tcg_gen_neg_tl(d, d); 5890 /* nabs never overflows */ 5891 tcg_gen_movi_tl(cpu_ov, 0); 5892 if (unlikely(Rc(ctx->opcode) != 0)) { 5893 gen_set_Rc0(ctx, d); 5894 } 5895 } 5896 5897 /* rlmi - rlmi. */ 5898 static void gen_rlmi(DisasContext *ctx) 5899 { 5900 uint32_t mb = MB(ctx->opcode); 5901 uint32_t me = ME(ctx->opcode); 5902 TCGv t0 = tcg_temp_new(); 5903 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5904 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5905 tcg_gen_andi_tl(t0, t0, MASK(mb, me)); 5906 tcg_gen_andi_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 5907 ~MASK(mb, me)); 5908 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], t0); 5909 tcg_temp_free(t0); 5910 if (unlikely(Rc(ctx->opcode) != 0)) { 5911 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5912 } 5913 } 5914 5915 /* rrib - rrib. 
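 * Implemented below as: take bit 0 (the most significant bit) of rS,
 * move it right by rB & 0x1F, and insert it into rA at that position,
 * leaving the other bits of rA unchanged.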
*/ 5916 static void gen_rrib(DisasContext *ctx) 5917 { 5918 TCGv t0 = tcg_temp_new(); 5919 TCGv t1 = tcg_temp_new(); 5920 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5921 tcg_gen_movi_tl(t1, 0x80000000); 5922 tcg_gen_shr_tl(t1, t1, t0); 5923 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5924 tcg_gen_and_tl(t0, t0, t1); 5925 tcg_gen_andc_tl(t1, cpu_gpr[rA(ctx->opcode)], t1); 5926 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5927 tcg_temp_free(t0); 5928 tcg_temp_free(t1); 5929 if (unlikely(Rc(ctx->opcode) != 0)) { 5930 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5931 } 5932 } 5933 5934 /* sle - sle. */ 5935 static void gen_sle(DisasContext *ctx) 5936 { 5937 TCGv t0 = tcg_temp_new(); 5938 TCGv t1 = tcg_temp_new(); 5939 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 5940 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 5941 tcg_gen_subfi_tl(t1, 32, t1); 5942 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 5943 tcg_gen_or_tl(t1, t0, t1); 5944 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5945 gen_store_spr(SPR_MQ, t1); 5946 tcg_temp_free(t0); 5947 tcg_temp_free(t1); 5948 if (unlikely(Rc(ctx->opcode) != 0)) { 5949 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5950 } 5951 } 5952 5953 /* sleq - sleq. */ 5954 static void gen_sleq(DisasContext *ctx) 5955 { 5956 TCGv t0 = tcg_temp_new(); 5957 TCGv t1 = tcg_temp_new(); 5958 TCGv t2 = tcg_temp_new(); 5959 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 5960 tcg_gen_movi_tl(t2, 0xFFFFFFFF); 5961 tcg_gen_shl_tl(t2, t2, t0); 5962 tcg_gen_rotl_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 5963 gen_load_spr(t1, SPR_MQ); 5964 gen_store_spr(SPR_MQ, t0); 5965 tcg_gen_and_tl(t0, t0, t2); 5966 tcg_gen_andc_tl(t1, t1, t2); 5967 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 5968 tcg_temp_free(t0); 5969 tcg_temp_free(t1); 5970 tcg_temp_free(t2); 5971 if (unlikely(Rc(ctx->opcode) != 0)) { 5972 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5973 } 5974 } 5975 5976 /* sliq - sliq. */ 5977 static void gen_sliq(DisasContext *ctx) 5978 { 5979 int sh = SH(ctx->opcode); 5980 TCGv t0 = tcg_temp_new(); 5981 TCGv t1 = tcg_temp_new(); 5982 tcg_gen_shli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 5983 tcg_gen_shri_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 5984 tcg_gen_or_tl(t1, t0, t1); 5985 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 5986 gen_store_spr(SPR_MQ, t1); 5987 tcg_temp_free(t0); 5988 tcg_temp_free(t1); 5989 if (unlikely(Rc(ctx->opcode) != 0)) { 5990 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 5991 } 5992 } 5993 5994 /* slliq - slliq. */ 5995 static void gen_slliq(DisasContext *ctx) 5996 { 5997 int sh = SH(ctx->opcode); 5998 TCGv t0 = tcg_temp_new(); 5999 TCGv t1 = tcg_temp_new(); 6000 tcg_gen_rotli_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6001 gen_load_spr(t1, SPR_MQ); 6002 gen_store_spr(SPR_MQ, t0); 6003 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU << sh)); 6004 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU << sh)); 6005 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6006 tcg_temp_free(t0); 6007 tcg_temp_free(t1); 6008 if (unlikely(Rc(ctx->opcode) != 0)) { 6009 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6010 } 6011 } 6012 6013 /* sllq - sllq. 
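 * As implemented below: when bit 0x20 of rB is set, the result is
 * taken entirely from MQ under the mask 0xFFFFFFFF << (rB & 0x1F);
 * otherwise the result is rS shifted left, with the low-order bits
 * filled from MQ under the inverse mask.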
*/ 6014 static void gen_sllq(DisasContext *ctx) 6015 { 6016 TCGLabel *l1 = gen_new_label(); 6017 TCGLabel *l2 = gen_new_label(); 6018 TCGv t0 = tcg_temp_local_new(); 6019 TCGv t1 = tcg_temp_local_new(); 6020 TCGv t2 = tcg_temp_local_new(); 6021 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6022 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6023 tcg_gen_shl_tl(t1, t1, t2); 6024 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6025 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6026 gen_load_spr(t0, SPR_MQ); 6027 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6028 tcg_gen_br(l2); 6029 gen_set_label(l1); 6030 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6031 gen_load_spr(t2, SPR_MQ); 6032 tcg_gen_andc_tl(t1, t2, t1); 6033 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6034 gen_set_label(l2); 6035 tcg_temp_free(t0); 6036 tcg_temp_free(t1); 6037 tcg_temp_free(t2); 6038 if (unlikely(Rc(ctx->opcode) != 0)) { 6039 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6040 } 6041 } 6042 6043 /* slq - slq. */ 6044 static void gen_slq(DisasContext *ctx) 6045 { 6046 TCGLabel *l1 = gen_new_label(); 6047 TCGv t0 = tcg_temp_new(); 6048 TCGv t1 = tcg_temp_new(); 6049 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6050 tcg_gen_shl_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6051 tcg_gen_subfi_tl(t1, 32, t1); 6052 tcg_gen_shr_tl(t1, cpu_gpr[rS(ctx->opcode)], t1); 6053 tcg_gen_or_tl(t1, t0, t1); 6054 gen_store_spr(SPR_MQ, t1); 6055 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20); 6056 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6057 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6058 tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0); 6059 gen_set_label(l1); 6060 tcg_temp_free(t0); 6061 tcg_temp_free(t1); 6062 if (unlikely(Rc(ctx->opcode) != 0)) { 6063 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6064 } 6065 } 6066 6067 /* sraiq - sraiq. */ 6068 static void gen_sraiq(DisasContext *ctx) 6069 { 6070 int sh = SH(ctx->opcode); 6071 TCGLabel *l1 = gen_new_label(); 6072 TCGv t0 = tcg_temp_new(); 6073 TCGv t1 = tcg_temp_new(); 6074 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6075 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6076 tcg_gen_or_tl(t0, t0, t1); 6077 gen_store_spr(SPR_MQ, t0); 6078 tcg_gen_movi_tl(cpu_ca, 0); 6079 tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1); 6080 tcg_gen_brcondi_tl(TCG_COND_GE, cpu_gpr[rS(ctx->opcode)], 0, l1); 6081 tcg_gen_movi_tl(cpu_ca, 1); 6082 gen_set_label(l1); 6083 tcg_gen_sari_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], sh); 6084 tcg_temp_free(t0); 6085 tcg_temp_free(t1); 6086 if (unlikely(Rc(ctx->opcode) != 0)) { 6087 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6088 } 6089 } 6090 6091 /* sraq - sraq. 
 */
static void gen_sraq(DisasContext *ctx)
{
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_local_new();
    TCGv t2 = tcg_temp_local_new();
    tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_sar_tl(t1, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_subfi_tl(t2, 32, t2);
    tcg_gen_shl_tl(t2, cpu_gpr[rS(ctx->opcode)], t2);
    tcg_gen_or_tl(t0, t0, t2);
    gen_store_spr(SPR_MQ, t0);
    tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    tcg_gen_mov_tl(t2, cpu_gpr[rS(ctx->opcode)]);
    tcg_gen_sari_tl(t1, cpu_gpr[rS(ctx->opcode)], 31);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t1);
    tcg_gen_movi_tl(cpu_ca, 0);
    tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l2);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t2, 0, l2);
    tcg_gen_movi_tl(cpu_ca, 1);
    gen_set_label(l2);
    tcg_temp_free(t1);
    tcg_temp_free(t2);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* sre - sre. */
static void gen_sre(DisasContext *ctx)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    gen_store_spr(SPR_MQ, t1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* srea - srea.
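 *
 * As implemented below, rA gets rS shifted right algebraically by the low
 * five bits of rB while MQ gets rS rotated right by the same amount.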
*/ 6145 static void gen_srea(DisasContext *ctx) 6146 { 6147 TCGv t0 = tcg_temp_new(); 6148 TCGv t1 = tcg_temp_new(); 6149 tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F); 6150 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1); 6151 gen_store_spr(SPR_MQ, t0); 6152 tcg_gen_sar_tl(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)], t1); 6153 tcg_temp_free(t0); 6154 tcg_temp_free(t1); 6155 if (unlikely(Rc(ctx->opcode) != 0)) { 6156 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6157 } 6158 } 6159 6160 /* sreq */ 6161 static void gen_sreq(DisasContext *ctx) 6162 { 6163 TCGv t0 = tcg_temp_new(); 6164 TCGv t1 = tcg_temp_new(); 6165 TCGv t2 = tcg_temp_new(); 6166 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x1F); 6167 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6168 tcg_gen_shr_tl(t1, t1, t0); 6169 tcg_gen_rotr_tl(t0, cpu_gpr[rS(ctx->opcode)], t0); 6170 gen_load_spr(t2, SPR_MQ); 6171 gen_store_spr(SPR_MQ, t0); 6172 tcg_gen_and_tl(t0, t0, t1); 6173 tcg_gen_andc_tl(t2, t2, t1); 6174 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6175 tcg_temp_free(t0); 6176 tcg_temp_free(t1); 6177 tcg_temp_free(t2); 6178 if (unlikely(Rc(ctx->opcode) != 0)) { 6179 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6180 } 6181 } 6182 6183 /* sriq */ 6184 static void gen_sriq(DisasContext *ctx) 6185 { 6186 int sh = SH(ctx->opcode); 6187 TCGv t0 = tcg_temp_new(); 6188 TCGv t1 = tcg_temp_new(); 6189 tcg_gen_shri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6190 tcg_gen_shli_tl(t1, cpu_gpr[rS(ctx->opcode)], 32 - sh); 6191 tcg_gen_or_tl(t1, t0, t1); 6192 tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0); 6193 gen_store_spr(SPR_MQ, t1); 6194 tcg_temp_free(t0); 6195 tcg_temp_free(t1); 6196 if (unlikely(Rc(ctx->opcode) != 0)) { 6197 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6198 } 6199 } 6200 6201 /* srliq */ 6202 static void gen_srliq(DisasContext *ctx) 6203 { 6204 int sh = SH(ctx->opcode); 6205 TCGv t0 = tcg_temp_new(); 6206 TCGv t1 = tcg_temp_new(); 6207 tcg_gen_rotri_tl(t0, cpu_gpr[rS(ctx->opcode)], sh); 6208 gen_load_spr(t1, SPR_MQ); 6209 gen_store_spr(SPR_MQ, t0); 6210 tcg_gen_andi_tl(t0, t0, (0xFFFFFFFFU >> sh)); 6211 tcg_gen_andi_tl(t1, t1, ~(0xFFFFFFFFU >> sh)); 6212 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6213 tcg_temp_free(t0); 6214 tcg_temp_free(t1); 6215 if (unlikely(Rc(ctx->opcode) != 0)) { 6216 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6217 } 6218 } 6219 6220 /* srlq */ 6221 static void gen_srlq(DisasContext *ctx) 6222 { 6223 TCGLabel *l1 = gen_new_label(); 6224 TCGLabel *l2 = gen_new_label(); 6225 TCGv t0 = tcg_temp_local_new(); 6226 TCGv t1 = tcg_temp_local_new(); 6227 TCGv t2 = tcg_temp_local_new(); 6228 tcg_gen_andi_tl(t2, cpu_gpr[rB(ctx->opcode)], 0x1F); 6229 tcg_gen_movi_tl(t1, 0xFFFFFFFF); 6230 tcg_gen_shr_tl(t2, t1, t2); 6231 tcg_gen_andi_tl(t0, cpu_gpr[rB(ctx->opcode)], 0x20); 6232 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); 6233 gen_load_spr(t0, SPR_MQ); 6234 tcg_gen_and_tl(cpu_gpr[rA(ctx->opcode)], t0, t2); 6235 tcg_gen_br(l2); 6236 gen_set_label(l1); 6237 tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t2); 6238 tcg_gen_and_tl(t0, t0, t2); 6239 gen_load_spr(t1, SPR_MQ); 6240 tcg_gen_andc_tl(t1, t1, t2); 6241 tcg_gen_or_tl(cpu_gpr[rA(ctx->opcode)], t0, t1); 6242 gen_set_label(l2); 6243 tcg_temp_free(t0); 6244 tcg_temp_free(t1); 6245 tcg_temp_free(t2); 6246 if (unlikely(Rc(ctx->opcode) != 0)) { 6247 gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]); 6248 } 6249 } 6250 6251 /* srq */ 6252 static void gen_srq(DisasContext *ctx) 6253 { 6254 TCGLabel *l1 = gen_new_label(); 6255 TCGv t0 = tcg_temp_new(); 6256 TCGv t1 = 
tcg_temp_new();
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x1F);
    tcg_gen_shr_tl(t0, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_subfi_tl(t1, 32, t1);
    tcg_gen_shl_tl(t1, cpu_gpr[rS(ctx->opcode)], t1);
    tcg_gen_or_tl(t1, t0, t1);
    gen_store_spr(SPR_MQ, t1);
    tcg_gen_andi_tl(t1, cpu_gpr[rB(ctx->opcode)], 0x20);
    tcg_gen_mov_tl(cpu_gpr[rA(ctx->opcode)], t0);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
    tcg_gen_movi_tl(cpu_gpr[rA(ctx->opcode)], 0);
    gen_set_label(l1);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    if (unlikely(Rc(ctx->opcode) != 0)) {
        gen_set_Rc0(ctx, cpu_gpr[rA(ctx->opcode)]);
    }
}

/* PowerPC 602 specific instructions */

/* dsa */
static void gen_dsa(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* esa */
static void gen_esa(DisasContext *ctx)
{
    /* XXX: TODO */
    gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL);
}

/* mfrom */
static void gen_mfrom(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_602_mfrom(cpu_gpr[rD(ctx->opcode)], cpu_gpr[rA(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* 602 - 603 - G2 TLB management */

/* tlbld */
static void gen_tlbld_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_6xx_tlbd(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* tlbli */
static void gen_tlbli_6xx(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;
    gen_helper_6xx_tlbi(cpu_env, cpu_gpr[rB(ctx->opcode)]);
#endif /* defined(CONFIG_USER_ONLY) */
}

/* POWER instructions not in PowerPC 601 */

/* clf */
static void gen_clf(DisasContext *ctx)
{
    /* Cache line flush: implemented as no-op */
}

/* cli */
static void gen_cli(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    /* Cache line invalidate: privileged and treated as no-op */
    CHK_SV;
#endif /* defined(CONFIG_USER_ONLY) */
}

/* dclst */
static void gen_dclst(DisasContext *ctx)
{
    /* Data cache line store: treated as no-op */
}

static void gen_mfsri(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    int ra = rA(ctx->opcode);
    int rd = rD(ctx->opcode);
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    tcg_gen_extract_tl(t0, t0, 28, 4);
    gen_helper_load_sr(cpu_gpr[rd], cpu_env, t0);
    tcg_temp_free(t0);
    if (ra != 0 && ra != rd) {
        tcg_gen_mov_tl(cpu_gpr[ra], cpu_gpr[rd]);
    }
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rac(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    TCGv t0;

    CHK_SV;
    t0 = tcg_temp_new();
    gen_addr_reg_index(ctx, t0);
    gen_helper_rac(cpu_gpr[rD(ctx->opcode)], cpu_env, t0);
    tcg_temp_free(t0);
#endif /* defined(CONFIG_USER_ONLY) */
}

static void gen_rfsvc(DisasContext *ctx)
{
#if defined(CONFIG_USER_ONLY)
    GEN_PRIV;
#else
    CHK_SV;

    gen_helper_rfsvc(cpu_env);
    ctx->base.is_jmp = DISAS_EXIT;
#endif /*
defined(CONFIG_USER_ONLY) */ 6397 } 6398 6399 /* svc is not implemented for now */ 6400 6401 /* BookE specific instructions */ 6402 6403 /* XXX: not implemented on 440 ? */ 6404 static void gen_mfapidi(DisasContext *ctx) 6405 { 6406 /* XXX: TODO */ 6407 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6408 } 6409 6410 /* XXX: not implemented on 440 ? */ 6411 static void gen_tlbiva(DisasContext *ctx) 6412 { 6413 #if defined(CONFIG_USER_ONLY) 6414 GEN_PRIV; 6415 #else 6416 TCGv t0; 6417 6418 CHK_SV; 6419 t0 = tcg_temp_new(); 6420 gen_addr_reg_index(ctx, t0); 6421 gen_helper_tlbiva(cpu_env, cpu_gpr[rB(ctx->opcode)]); 6422 tcg_temp_free(t0); 6423 #endif /* defined(CONFIG_USER_ONLY) */ 6424 } 6425 6426 /* All 405 MAC instructions are translated here */ 6427 static inline void gen_405_mulladd_insn(DisasContext *ctx, int opc2, int opc3, 6428 int ra, int rb, int rt, int Rc) 6429 { 6430 TCGv t0, t1; 6431 6432 t0 = tcg_temp_local_new(); 6433 t1 = tcg_temp_local_new(); 6434 6435 switch (opc3 & 0x0D) { 6436 case 0x05: 6437 /* macchw - macchw. - macchwo - macchwo. */ 6438 /* macchws - macchws. - macchwso - macchwso. */ 6439 /* nmacchw - nmacchw. - nmacchwo - nmacchwo. */ 6440 /* nmacchws - nmacchws. - nmacchwso - nmacchwso. */ 6441 /* mulchw - mulchw. */ 6442 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6443 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6444 tcg_gen_ext16s_tl(t1, t1); 6445 break; 6446 case 0x04: 6447 /* macchwu - macchwu. - macchwuo - macchwuo. */ 6448 /* macchwsu - macchwsu. - macchwsuo - macchwsuo. */ 6449 /* mulchwu - mulchwu. */ 6450 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6451 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6452 tcg_gen_ext16u_tl(t1, t1); 6453 break; 6454 case 0x01: 6455 /* machhw - machhw. - machhwo - machhwo. */ 6456 /* machhws - machhws. - machhwso - machhwso. */ 6457 /* nmachhw - nmachhw. - nmachhwo - nmachhwo. */ 6458 /* nmachhws - nmachhws. - nmachhwso - nmachhwso. */ 6459 /* mulhhw - mulhhw. */ 6460 tcg_gen_sari_tl(t0, cpu_gpr[ra], 16); 6461 tcg_gen_ext16s_tl(t0, t0); 6462 tcg_gen_sari_tl(t1, cpu_gpr[rb], 16); 6463 tcg_gen_ext16s_tl(t1, t1); 6464 break; 6465 case 0x00: 6466 /* machhwu - machhwu. - machhwuo - machhwuo. */ 6467 /* machhwsu - machhwsu. - machhwsuo - machhwsuo. */ 6468 /* mulhhwu - mulhhwu. */ 6469 tcg_gen_shri_tl(t0, cpu_gpr[ra], 16); 6470 tcg_gen_ext16u_tl(t0, t0); 6471 tcg_gen_shri_tl(t1, cpu_gpr[rb], 16); 6472 tcg_gen_ext16u_tl(t1, t1); 6473 break; 6474 case 0x0D: 6475 /* maclhw - maclhw. - maclhwo - maclhwo. */ 6476 /* maclhws - maclhws. - maclhwso - maclhwso. */ 6477 /* nmaclhw - nmaclhw. - nmaclhwo - nmaclhwo. */ 6478 /* nmaclhws - nmaclhws. - nmaclhwso - nmaclhwso. */ 6479 /* mullhw - mullhw. */ 6480 tcg_gen_ext16s_tl(t0, cpu_gpr[ra]); 6481 tcg_gen_ext16s_tl(t1, cpu_gpr[rb]); 6482 break; 6483 case 0x0C: 6484 /* maclhwu - maclhwu. - maclhwuo - maclhwuo. */ 6485 /* maclhwsu - maclhwsu. - maclhwsuo - maclhwsuo. */ 6486 /* mullhwu - mullhwu. 
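 *
 * All of the case 0x0C forms operate on the low halfwords of rA and rB,
 * zero-extended by the two ext16u ops below.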
*/ 6487 tcg_gen_ext16u_tl(t0, cpu_gpr[ra]); 6488 tcg_gen_ext16u_tl(t1, cpu_gpr[rb]); 6489 break; 6490 } 6491 if (opc2 & 0x04) { 6492 /* (n)multiply-and-accumulate (0x0C / 0x0E) */ 6493 tcg_gen_mul_tl(t1, t0, t1); 6494 if (opc2 & 0x02) { 6495 /* nmultiply-and-accumulate (0x0E) */ 6496 tcg_gen_sub_tl(t0, cpu_gpr[rt], t1); 6497 } else { 6498 /* multiply-and-accumulate (0x0C) */ 6499 tcg_gen_add_tl(t0, cpu_gpr[rt], t1); 6500 } 6501 6502 if (opc3 & 0x12) { 6503 /* Check overflow and/or saturate */ 6504 TCGLabel *l1 = gen_new_label(); 6505 6506 if (opc3 & 0x10) { 6507 /* Start with XER OV disabled, the most likely case */ 6508 tcg_gen_movi_tl(cpu_ov, 0); 6509 } 6510 if (opc3 & 0x01) { 6511 /* Signed */ 6512 tcg_gen_xor_tl(t1, cpu_gpr[rt], t1); 6513 tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1); 6514 tcg_gen_xor_tl(t1, cpu_gpr[rt], t0); 6515 tcg_gen_brcondi_tl(TCG_COND_LT, t1, 0, l1); 6516 if (opc3 & 0x02) { 6517 /* Saturate */ 6518 tcg_gen_sari_tl(t0, cpu_gpr[rt], 31); 6519 tcg_gen_xori_tl(t0, t0, 0x7fffffff); 6520 } 6521 } else { 6522 /* Unsigned */ 6523 tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1); 6524 if (opc3 & 0x02) { 6525 /* Saturate */ 6526 tcg_gen_movi_tl(t0, UINT32_MAX); 6527 } 6528 } 6529 if (opc3 & 0x10) { 6530 /* Check overflow */ 6531 tcg_gen_movi_tl(cpu_ov, 1); 6532 tcg_gen_movi_tl(cpu_so, 1); 6533 } 6534 gen_set_label(l1); 6535 tcg_gen_mov_tl(cpu_gpr[rt], t0); 6536 } 6537 } else { 6538 tcg_gen_mul_tl(cpu_gpr[rt], t0, t1); 6539 } 6540 tcg_temp_free(t0); 6541 tcg_temp_free(t1); 6542 if (unlikely(Rc) != 0) { 6543 /* Update Rc0 */ 6544 gen_set_Rc0(ctx, cpu_gpr[rt]); 6545 } 6546 } 6547 6548 #define GEN_MAC_HANDLER(name, opc2, opc3) \ 6549 static void glue(gen_, name)(DisasContext *ctx) \ 6550 { \ 6551 gen_405_mulladd_insn(ctx, opc2, opc3, rA(ctx->opcode), rB(ctx->opcode), \ 6552 rD(ctx->opcode), Rc(ctx->opcode)); \ 6553 } 6554 6555 /* macchw - macchw. */ 6556 GEN_MAC_HANDLER(macchw, 0x0C, 0x05); 6557 /* macchwo - macchwo. */ 6558 GEN_MAC_HANDLER(macchwo, 0x0C, 0x15); 6559 /* macchws - macchws. */ 6560 GEN_MAC_HANDLER(macchws, 0x0C, 0x07); 6561 /* macchwso - macchwso. */ 6562 GEN_MAC_HANDLER(macchwso, 0x0C, 0x17); 6563 /* macchwsu - macchwsu. */ 6564 GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06); 6565 /* macchwsuo - macchwsuo. */ 6566 GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16); 6567 /* macchwu - macchwu. */ 6568 GEN_MAC_HANDLER(macchwu, 0x0C, 0x04); 6569 /* macchwuo - macchwuo. */ 6570 GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14); 6571 /* machhw - machhw. */ 6572 GEN_MAC_HANDLER(machhw, 0x0C, 0x01); 6573 /* machhwo - machhwo. */ 6574 GEN_MAC_HANDLER(machhwo, 0x0C, 0x11); 6575 /* machhws - machhws. */ 6576 GEN_MAC_HANDLER(machhws, 0x0C, 0x03); 6577 /* machhwso - machhwso. */ 6578 GEN_MAC_HANDLER(machhwso, 0x0C, 0x13); 6579 /* machhwsu - machhwsu. */ 6580 GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02); 6581 /* machhwsuo - machhwsuo. */ 6582 GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12); 6583 /* machhwu - machhwu. */ 6584 GEN_MAC_HANDLER(machhwu, 0x0C, 0x00); 6585 /* machhwuo - machhwuo. */ 6586 GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10); 6587 /* maclhw - maclhw. */ 6588 GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D); 6589 /* maclhwo - maclhwo. */ 6590 GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D); 6591 /* maclhws - maclhws. */ 6592 GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F); 6593 /* maclhwso - maclhwso. */ 6594 GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F); 6595 /* maclhwu - maclhwu. */ 6596 GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C); 6597 /* maclhwuo - maclhwuo. */ 6598 GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C); 6599 /* maclhwsu - maclhwsu. 
*/ 6600 GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E); 6601 /* maclhwsuo - maclhwsuo. */ 6602 GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E); 6603 /* nmacchw - nmacchw. */ 6604 GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05); 6605 /* nmacchwo - nmacchwo. */ 6606 GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15); 6607 /* nmacchws - nmacchws. */ 6608 GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07); 6609 /* nmacchwso - nmacchwso. */ 6610 GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17); 6611 /* nmachhw - nmachhw. */ 6612 GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01); 6613 /* nmachhwo - nmachhwo. */ 6614 GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11); 6615 /* nmachhws - nmachhws. */ 6616 GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03); 6617 /* nmachhwso - nmachhwso. */ 6618 GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13); 6619 /* nmaclhw - nmaclhw. */ 6620 GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D); 6621 /* nmaclhwo - nmaclhwo. */ 6622 GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D); 6623 /* nmaclhws - nmaclhws. */ 6624 GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F); 6625 /* nmaclhwso - nmaclhwso. */ 6626 GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F); 6627 6628 /* mulchw - mulchw. */ 6629 GEN_MAC_HANDLER(mulchw, 0x08, 0x05); 6630 /* mulchwu - mulchwu. */ 6631 GEN_MAC_HANDLER(mulchwu, 0x08, 0x04); 6632 /* mulhhw - mulhhw. */ 6633 GEN_MAC_HANDLER(mulhhw, 0x08, 0x01); 6634 /* mulhhwu - mulhhwu. */ 6635 GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00); 6636 /* mullhw - mullhw. */ 6637 GEN_MAC_HANDLER(mullhw, 0x08, 0x0D); 6638 /* mullhwu - mullhwu. */ 6639 GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C); 6640 6641 /* mfdcr */ 6642 static void gen_mfdcr(DisasContext *ctx) 6643 { 6644 #if defined(CONFIG_USER_ONLY) 6645 GEN_PRIV; 6646 #else 6647 TCGv dcrn; 6648 6649 CHK_SV; 6650 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6651 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, dcrn); 6652 tcg_temp_free(dcrn); 6653 #endif /* defined(CONFIG_USER_ONLY) */ 6654 } 6655 6656 /* mtdcr */ 6657 static void gen_mtdcr(DisasContext *ctx) 6658 { 6659 #if defined(CONFIG_USER_ONLY) 6660 GEN_PRIV; 6661 #else 6662 TCGv dcrn; 6663 6664 CHK_SV; 6665 dcrn = tcg_const_tl(SPR(ctx->opcode)); 6666 gen_helper_store_dcr(cpu_env, dcrn, cpu_gpr[rS(ctx->opcode)]); 6667 tcg_temp_free(dcrn); 6668 #endif /* defined(CONFIG_USER_ONLY) */ 6669 } 6670 6671 /* mfdcrx */ 6672 /* XXX: not implemented on 440 ? */ 6673 static void gen_mfdcrx(DisasContext *ctx) 6674 { 6675 #if defined(CONFIG_USER_ONLY) 6676 GEN_PRIV; 6677 #else 6678 CHK_SV; 6679 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6680 cpu_gpr[rA(ctx->opcode)]); 6681 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6682 #endif /* defined(CONFIG_USER_ONLY) */ 6683 } 6684 6685 /* mtdcrx */ 6686 /* XXX: not implemented on 440 ? 
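 *
 * Unlike mtdcr above, which encodes the DCR number in the instruction's
 * SPR field, mtdcrx takes the DCR number from register rA at run time.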
*/ 6687 static void gen_mtdcrx(DisasContext *ctx) 6688 { 6689 #if defined(CONFIG_USER_ONLY) 6690 GEN_PRIV; 6691 #else 6692 CHK_SV; 6693 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6694 cpu_gpr[rS(ctx->opcode)]); 6695 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6696 #endif /* defined(CONFIG_USER_ONLY) */ 6697 } 6698 6699 /* mfdcrux (PPC 460) : user-mode access to DCR */ 6700 static void gen_mfdcrux(DisasContext *ctx) 6701 { 6702 gen_helper_load_dcr(cpu_gpr[rD(ctx->opcode)], cpu_env, 6703 cpu_gpr[rA(ctx->opcode)]); 6704 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6705 } 6706 6707 /* mtdcrux (PPC 460) : user-mode access to DCR */ 6708 static void gen_mtdcrux(DisasContext *ctx) 6709 { 6710 gen_helper_store_dcr(cpu_env, cpu_gpr[rA(ctx->opcode)], 6711 cpu_gpr[rS(ctx->opcode)]); 6712 /* Note: Rc update flag set leads to undefined state of Rc0 */ 6713 } 6714 6715 /* dccci */ 6716 static void gen_dccci(DisasContext *ctx) 6717 { 6718 CHK_SV; 6719 /* interpreted as no-op */ 6720 } 6721 6722 /* dcread */ 6723 static void gen_dcread(DisasContext *ctx) 6724 { 6725 #if defined(CONFIG_USER_ONLY) 6726 GEN_PRIV; 6727 #else 6728 TCGv EA, val; 6729 6730 CHK_SV; 6731 gen_set_access_type(ctx, ACCESS_CACHE); 6732 EA = tcg_temp_new(); 6733 gen_addr_reg_index(ctx, EA); 6734 val = tcg_temp_new(); 6735 gen_qemu_ld32u(ctx, val, EA); 6736 tcg_temp_free(val); 6737 tcg_gen_mov_tl(cpu_gpr[rD(ctx->opcode)], EA); 6738 tcg_temp_free(EA); 6739 #endif /* defined(CONFIG_USER_ONLY) */ 6740 } 6741 6742 /* icbt */ 6743 static void gen_icbt_40x(DisasContext *ctx) 6744 { 6745 /* 6746 * interpreted as no-op 6747 * XXX: specification say this is treated as a load by the MMU but 6748 * does not generate any exception 6749 */ 6750 } 6751 6752 /* iccci */ 6753 static void gen_iccci(DisasContext *ctx) 6754 { 6755 CHK_SV; 6756 /* interpreted as no-op */ 6757 } 6758 6759 /* icread */ 6760 static void gen_icread(DisasContext *ctx) 6761 { 6762 CHK_SV; 6763 /* interpreted as no-op */ 6764 } 6765 6766 /* rfci (supervisor only) */ 6767 static void gen_rfci_40x(DisasContext *ctx) 6768 { 6769 #if defined(CONFIG_USER_ONLY) 6770 GEN_PRIV; 6771 #else 6772 CHK_SV; 6773 /* Restore CPU state */ 6774 gen_helper_40x_rfci(cpu_env); 6775 ctx->base.is_jmp = DISAS_EXIT; 6776 #endif /* defined(CONFIG_USER_ONLY) */ 6777 } 6778 6779 static void gen_rfci(DisasContext *ctx) 6780 { 6781 #if defined(CONFIG_USER_ONLY) 6782 GEN_PRIV; 6783 #else 6784 CHK_SV; 6785 /* Restore CPU state */ 6786 gen_helper_rfci(cpu_env); 6787 ctx->base.is_jmp = DISAS_EXIT; 6788 #endif /* defined(CONFIG_USER_ONLY) */ 6789 } 6790 6791 /* BookE specific */ 6792 6793 /* XXX: not implemented on 440 ? */ 6794 static void gen_rfdi(DisasContext *ctx) 6795 { 6796 #if defined(CONFIG_USER_ONLY) 6797 GEN_PRIV; 6798 #else 6799 CHK_SV; 6800 /* Restore CPU state */ 6801 gen_helper_rfdi(cpu_env); 6802 ctx->base.is_jmp = DISAS_EXIT; 6803 #endif /* defined(CONFIG_USER_ONLY) */ 6804 } 6805 6806 /* XXX: not implemented on 440 ? 
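 *
 * As with rfci and rfdi above, the MSR/NIP restore happens inside the
 * helper, so the translation block is ended with DISAS_EXIT to pick up
 * the new context.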
*/ 6807 static void gen_rfmci(DisasContext *ctx) 6808 { 6809 #if defined(CONFIG_USER_ONLY) 6810 GEN_PRIV; 6811 #else 6812 CHK_SV; 6813 /* Restore CPU state */ 6814 gen_helper_rfmci(cpu_env); 6815 ctx->base.is_jmp = DISAS_EXIT; 6816 #endif /* defined(CONFIG_USER_ONLY) */ 6817 } 6818 6819 /* TLB management - PowerPC 405 implementation */ 6820 6821 /* tlbre */ 6822 static void gen_tlbre_40x(DisasContext *ctx) 6823 { 6824 #if defined(CONFIG_USER_ONLY) 6825 GEN_PRIV; 6826 #else 6827 CHK_SV; 6828 switch (rB(ctx->opcode)) { 6829 case 0: 6830 gen_helper_4xx_tlbre_hi(cpu_gpr[rD(ctx->opcode)], cpu_env, 6831 cpu_gpr[rA(ctx->opcode)]); 6832 break; 6833 case 1: 6834 gen_helper_4xx_tlbre_lo(cpu_gpr[rD(ctx->opcode)], cpu_env, 6835 cpu_gpr[rA(ctx->opcode)]); 6836 break; 6837 default: 6838 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6839 break; 6840 } 6841 #endif /* defined(CONFIG_USER_ONLY) */ 6842 } 6843 6844 /* tlbsx - tlbsx. */ 6845 static void gen_tlbsx_40x(DisasContext *ctx) 6846 { 6847 #if defined(CONFIG_USER_ONLY) 6848 GEN_PRIV; 6849 #else 6850 TCGv t0; 6851 6852 CHK_SV; 6853 t0 = tcg_temp_new(); 6854 gen_addr_reg_index(ctx, t0); 6855 gen_helper_4xx_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6856 tcg_temp_free(t0); 6857 if (Rc(ctx->opcode)) { 6858 TCGLabel *l1 = gen_new_label(); 6859 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6860 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6861 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6862 gen_set_label(l1); 6863 } 6864 #endif /* defined(CONFIG_USER_ONLY) */ 6865 } 6866 6867 /* tlbwe */ 6868 static void gen_tlbwe_40x(DisasContext *ctx) 6869 { 6870 #if defined(CONFIG_USER_ONLY) 6871 GEN_PRIV; 6872 #else 6873 CHK_SV; 6874 6875 switch (rB(ctx->opcode)) { 6876 case 0: 6877 gen_helper_4xx_tlbwe_hi(cpu_env, cpu_gpr[rA(ctx->opcode)], 6878 cpu_gpr[rS(ctx->opcode)]); 6879 break; 6880 case 1: 6881 gen_helper_4xx_tlbwe_lo(cpu_env, cpu_gpr[rA(ctx->opcode)], 6882 cpu_gpr[rS(ctx->opcode)]); 6883 break; 6884 default: 6885 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6886 break; 6887 } 6888 #endif /* defined(CONFIG_USER_ONLY) */ 6889 } 6890 6891 /* TLB management - PowerPC 440 implementation */ 6892 6893 /* tlbre */ 6894 static void gen_tlbre_440(DisasContext *ctx) 6895 { 6896 #if defined(CONFIG_USER_ONLY) 6897 GEN_PRIV; 6898 #else 6899 CHK_SV; 6900 6901 switch (rB(ctx->opcode)) { 6902 case 0: 6903 case 1: 6904 case 2: 6905 { 6906 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6907 gen_helper_440_tlbre(cpu_gpr[rD(ctx->opcode)], cpu_env, 6908 t0, cpu_gpr[rA(ctx->opcode)]); 6909 tcg_temp_free_i32(t0); 6910 } 6911 break; 6912 default: 6913 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6914 break; 6915 } 6916 #endif /* defined(CONFIG_USER_ONLY) */ 6917 } 6918 6919 /* tlbsx - tlbsx. 
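 *
 * When Rc is set, CR0 is built from XER[SO] plus the EQ bit (0x2), which
 * is set only if the search helper found a matching entry, i.e. it
 * returned something other than -1 in rD.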
*/ 6920 static void gen_tlbsx_440(DisasContext *ctx) 6921 { 6922 #if defined(CONFIG_USER_ONLY) 6923 GEN_PRIV; 6924 #else 6925 TCGv t0; 6926 6927 CHK_SV; 6928 t0 = tcg_temp_new(); 6929 gen_addr_reg_index(ctx, t0); 6930 gen_helper_440_tlbsx(cpu_gpr[rD(ctx->opcode)], cpu_env, t0); 6931 tcg_temp_free(t0); 6932 if (Rc(ctx->opcode)) { 6933 TCGLabel *l1 = gen_new_label(); 6934 tcg_gen_trunc_tl_i32(cpu_crf[0], cpu_so); 6935 tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rD(ctx->opcode)], -1, l1); 6936 tcg_gen_ori_i32(cpu_crf[0], cpu_crf[0], 0x02); 6937 gen_set_label(l1); 6938 } 6939 #endif /* defined(CONFIG_USER_ONLY) */ 6940 } 6941 6942 /* tlbwe */ 6943 static void gen_tlbwe_440(DisasContext *ctx) 6944 { 6945 #if defined(CONFIG_USER_ONLY) 6946 GEN_PRIV; 6947 #else 6948 CHK_SV; 6949 switch (rB(ctx->opcode)) { 6950 case 0: 6951 case 1: 6952 case 2: 6953 { 6954 TCGv_i32 t0 = tcg_const_i32(rB(ctx->opcode)); 6955 gen_helper_440_tlbwe(cpu_env, t0, cpu_gpr[rA(ctx->opcode)], 6956 cpu_gpr[rS(ctx->opcode)]); 6957 tcg_temp_free_i32(t0); 6958 } 6959 break; 6960 default: 6961 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 6962 break; 6963 } 6964 #endif /* defined(CONFIG_USER_ONLY) */ 6965 } 6966 6967 /* TLB management - PowerPC BookE 2.06 implementation */ 6968 6969 /* tlbre */ 6970 static void gen_tlbre_booke206(DisasContext *ctx) 6971 { 6972 #if defined(CONFIG_USER_ONLY) 6973 GEN_PRIV; 6974 #else 6975 CHK_SV; 6976 gen_helper_booke206_tlbre(cpu_env); 6977 #endif /* defined(CONFIG_USER_ONLY) */ 6978 } 6979 6980 /* tlbsx - tlbsx. */ 6981 static void gen_tlbsx_booke206(DisasContext *ctx) 6982 { 6983 #if defined(CONFIG_USER_ONLY) 6984 GEN_PRIV; 6985 #else 6986 TCGv t0; 6987 6988 CHK_SV; 6989 if (rA(ctx->opcode)) { 6990 t0 = tcg_temp_new(); 6991 tcg_gen_mov_tl(t0, cpu_gpr[rD(ctx->opcode)]); 6992 } else { 6993 t0 = tcg_const_tl(0); 6994 } 6995 6996 tcg_gen_add_tl(t0, t0, cpu_gpr[rB(ctx->opcode)]); 6997 gen_helper_booke206_tlbsx(cpu_env, t0); 6998 tcg_temp_free(t0); 6999 #endif /* defined(CONFIG_USER_ONLY) */ 7000 } 7001 7002 /* tlbwe */ 7003 static void gen_tlbwe_booke206(DisasContext *ctx) 7004 { 7005 #if defined(CONFIG_USER_ONLY) 7006 GEN_PRIV; 7007 #else 7008 CHK_SV; 7009 gen_helper_booke206_tlbwe(cpu_env); 7010 #endif /* defined(CONFIG_USER_ONLY) */ 7011 } 7012 7013 static void gen_tlbivax_booke206(DisasContext *ctx) 7014 { 7015 #if defined(CONFIG_USER_ONLY) 7016 GEN_PRIV; 7017 #else 7018 TCGv t0; 7019 7020 CHK_SV; 7021 t0 = tcg_temp_new(); 7022 gen_addr_reg_index(ctx, t0); 7023 gen_helper_booke206_tlbivax(cpu_env, t0); 7024 tcg_temp_free(t0); 7025 #endif /* defined(CONFIG_USER_ONLY) */ 7026 } 7027 7028 static void gen_tlbilx_booke206(DisasContext *ctx) 7029 { 7030 #if defined(CONFIG_USER_ONLY) 7031 GEN_PRIV; 7032 #else 7033 TCGv t0; 7034 7035 CHK_SV; 7036 t0 = tcg_temp_new(); 7037 gen_addr_reg_index(ctx, t0); 7038 7039 switch ((ctx->opcode >> 21) & 0x3) { 7040 case 0: 7041 gen_helper_booke206_tlbilx0(cpu_env, t0); 7042 break; 7043 case 1: 7044 gen_helper_booke206_tlbilx1(cpu_env, t0); 7045 break; 7046 case 3: 7047 gen_helper_booke206_tlbilx3(cpu_env, t0); 7048 break; 7049 default: 7050 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7051 break; 7052 } 7053 7054 tcg_temp_free(t0); 7055 #endif /* defined(CONFIG_USER_ONLY) */ 7056 } 7057 7058 7059 /* wrtee */ 7060 static void gen_wrtee(DisasContext *ctx) 7061 { 7062 #if defined(CONFIG_USER_ONLY) 7063 GEN_PRIV; 7064 #else 7065 TCGv t0; 7066 7067 CHK_SV; 7068 t0 = tcg_temp_new(); 7069 tcg_gen_andi_tl(t0, cpu_gpr[rD(ctx->opcode)], (1 << MSR_EE)); 7070 
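    /*
     * t0 now holds just the EE bit taken from rD; the next two ops clear
     * MSR[EE] and OR that bit back in, i.e. MSR[EE] = rD[EE].
     */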
tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7071 tcg_gen_or_tl(cpu_msr, cpu_msr, t0); 7072 tcg_temp_free(t0); 7073 /* 7074 * Stop translation to have a chance to raise an exception if we 7075 * just set msr_ee to 1 7076 */ 7077 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7078 #endif /* defined(CONFIG_USER_ONLY) */ 7079 } 7080 7081 /* wrteei */ 7082 static void gen_wrteei(DisasContext *ctx) 7083 { 7084 #if defined(CONFIG_USER_ONLY) 7085 GEN_PRIV; 7086 #else 7087 CHK_SV; 7088 if (ctx->opcode & 0x00008000) { 7089 tcg_gen_ori_tl(cpu_msr, cpu_msr, (1 << MSR_EE)); 7090 /* Stop translation to have a chance to raise an exception */ 7091 ctx->base.is_jmp = DISAS_EXIT_UPDATE; 7092 } else { 7093 tcg_gen_andi_tl(cpu_msr, cpu_msr, ~(1 << MSR_EE)); 7094 } 7095 #endif /* defined(CONFIG_USER_ONLY) */ 7096 } 7097 7098 /* PowerPC 440 specific instructions */ 7099 7100 /* dlmzb */ 7101 static void gen_dlmzb(DisasContext *ctx) 7102 { 7103 TCGv_i32 t0 = tcg_const_i32(Rc(ctx->opcode)); 7104 gen_helper_dlmzb(cpu_gpr[rA(ctx->opcode)], cpu_env, 7105 cpu_gpr[rS(ctx->opcode)], cpu_gpr[rB(ctx->opcode)], t0); 7106 tcg_temp_free_i32(t0); 7107 } 7108 7109 /* mbar replaces eieio on 440 */ 7110 static void gen_mbar(DisasContext *ctx) 7111 { 7112 /* interpreted as no-op */ 7113 } 7114 7115 /* msync replaces sync on 440 */ 7116 static void gen_msync_4xx(DisasContext *ctx) 7117 { 7118 /* Only e500 seems to treat reserved bits as invalid */ 7119 if ((ctx->insns_flags2 & PPC2_BOOKE206) && 7120 (ctx->opcode & 0x03FFF801)) { 7121 gen_inval_exception(ctx, POWERPC_EXCP_INVAL_INVAL); 7122 } 7123 /* otherwise interpreted as no-op */ 7124 } 7125 7126 /* icbt */ 7127 static void gen_icbt_440(DisasContext *ctx) 7128 { 7129 /* 7130 * interpreted as no-op 7131 * XXX: specification say this is treated as a load by the MMU but 7132 * does not generate any exception 7133 */ 7134 } 7135 7136 /* Embedded.Processor Control */ 7137 7138 static void gen_msgclr(DisasContext *ctx) 7139 { 7140 #if defined(CONFIG_USER_ONLY) 7141 GEN_PRIV; 7142 #else 7143 CHK_HV; 7144 if (is_book3s_arch2x(ctx)) { 7145 gen_helper_book3s_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7146 } else { 7147 gen_helper_msgclr(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7148 } 7149 #endif /* defined(CONFIG_USER_ONLY) */ 7150 } 7151 7152 static void gen_msgsnd(DisasContext *ctx) 7153 { 7154 #if defined(CONFIG_USER_ONLY) 7155 GEN_PRIV; 7156 #else 7157 CHK_HV; 7158 if (is_book3s_arch2x(ctx)) { 7159 gen_helper_book3s_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7160 } else { 7161 gen_helper_msgsnd(cpu_gpr[rB(ctx->opcode)]); 7162 } 7163 #endif /* defined(CONFIG_USER_ONLY) */ 7164 } 7165 7166 #if defined(TARGET_PPC64) 7167 static void gen_msgclrp(DisasContext *ctx) 7168 { 7169 #if defined(CONFIG_USER_ONLY) 7170 GEN_PRIV; 7171 #else 7172 CHK_SV; 7173 gen_helper_book3s_msgclrp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7174 #endif /* defined(CONFIG_USER_ONLY) */ 7175 } 7176 7177 static void gen_msgsndp(DisasContext *ctx) 7178 { 7179 #if defined(CONFIG_USER_ONLY) 7180 GEN_PRIV; 7181 #else 7182 CHK_SV; 7183 gen_helper_book3s_msgsndp(cpu_env, cpu_gpr[rB(ctx->opcode)]); 7184 #endif /* defined(CONFIG_USER_ONLY) */ 7185 } 7186 #endif 7187 7188 static void gen_msgsync(DisasContext *ctx) 7189 { 7190 #if defined(CONFIG_USER_ONLY) 7191 GEN_PRIV; 7192 #else 7193 CHK_HV; 7194 #endif /* defined(CONFIG_USER_ONLY) */ 7195 /* interpreted as no-op */ 7196 } 7197 7198 #if defined(TARGET_PPC64) 7199 static void gen_maddld(DisasContext *ctx) 7200 { 7201 TCGv_i64 t1 = tcg_temp_new_i64(); 7202 7203 tcg_gen_mul_i64(t1, 
cpu_gpr[rA(ctx->opcode)], cpu_gpr[rB(ctx->opcode)]); 7204 tcg_gen_add_i64(cpu_gpr[rD(ctx->opcode)], t1, cpu_gpr[rC(ctx->opcode)]); 7205 tcg_temp_free_i64(t1); 7206 } 7207 7208 /* maddhd maddhdu */ 7209 static void gen_maddhd_maddhdu(DisasContext *ctx) 7210 { 7211 TCGv_i64 lo = tcg_temp_new_i64(); 7212 TCGv_i64 hi = tcg_temp_new_i64(); 7213 TCGv_i64 t1 = tcg_temp_new_i64(); 7214 7215 if (Rc(ctx->opcode)) { 7216 tcg_gen_mulu2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7217 cpu_gpr[rB(ctx->opcode)]); 7218 tcg_gen_movi_i64(t1, 0); 7219 } else { 7220 tcg_gen_muls2_i64(lo, hi, cpu_gpr[rA(ctx->opcode)], 7221 cpu_gpr[rB(ctx->opcode)]); 7222 tcg_gen_sari_i64(t1, cpu_gpr[rC(ctx->opcode)], 63); 7223 } 7224 tcg_gen_add2_i64(t1, cpu_gpr[rD(ctx->opcode)], lo, hi, 7225 cpu_gpr[rC(ctx->opcode)], t1); 7226 tcg_temp_free_i64(lo); 7227 tcg_temp_free_i64(hi); 7228 tcg_temp_free_i64(t1); 7229 } 7230 #endif /* defined(TARGET_PPC64) */ 7231 7232 static void gen_tbegin(DisasContext *ctx) 7233 { 7234 if (unlikely(!ctx->tm_enabled)) { 7235 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7236 return; 7237 } 7238 gen_helper_tbegin(cpu_env); 7239 } 7240 7241 #define GEN_TM_NOOP(name) \ 7242 static inline void gen_##name(DisasContext *ctx) \ 7243 { \ 7244 if (unlikely(!ctx->tm_enabled)) { \ 7245 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7246 return; \ 7247 } \ 7248 /* \ 7249 * Because tbegin always fails in QEMU, these user \ 7250 * space instructions all have a simple implementation: \ 7251 * \ 7252 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7253 * = 0b0 || 0b00 || 0b0 \ 7254 */ \ 7255 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7256 } 7257 7258 GEN_TM_NOOP(tend); 7259 GEN_TM_NOOP(tabort); 7260 GEN_TM_NOOP(tabortwc); 7261 GEN_TM_NOOP(tabortwci); 7262 GEN_TM_NOOP(tabortdc); 7263 GEN_TM_NOOP(tabortdci); 7264 GEN_TM_NOOP(tsr); 7265 7266 static inline void gen_cp_abort(DisasContext *ctx) 7267 { 7268 /* Do Nothing */ 7269 } 7270 7271 #define GEN_CP_PASTE_NOOP(name) \ 7272 static inline void gen_##name(DisasContext *ctx) \ 7273 { \ 7274 /* \ 7275 * Generate invalid exception until we have an \ 7276 * implementation of the copy paste facility \ 7277 */ \ 7278 gen_invalid(ctx); \ 7279 } 7280 7281 GEN_CP_PASTE_NOOP(copy) 7282 GEN_CP_PASTE_NOOP(paste) 7283 7284 static void gen_tcheck(DisasContext *ctx) 7285 { 7286 if (unlikely(!ctx->tm_enabled)) { 7287 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); 7288 return; 7289 } 7290 /* 7291 * Because tbegin always fails, the tcheck implementation is 7292 * simple: 7293 * 7294 * CR[CRF] = TDOOMED || MSR[TS] || 0b0 7295 * = 0b1 || 0b00 || 0b0 7296 */ 7297 tcg_gen_movi_i32(cpu_crf[crfD(ctx->opcode)], 0x8); 7298 } 7299 7300 #if defined(CONFIG_USER_ONLY) 7301 #define GEN_TM_PRIV_NOOP(name) \ 7302 static inline void gen_##name(DisasContext *ctx) \ 7303 { \ 7304 gen_priv_exception(ctx, POWERPC_EXCP_PRIV_OPC); \ 7305 } 7306 7307 #else 7308 7309 #define GEN_TM_PRIV_NOOP(name) \ 7310 static inline void gen_##name(DisasContext *ctx) \ 7311 { \ 7312 CHK_SV; \ 7313 if (unlikely(!ctx->tm_enabled)) { \ 7314 gen_exception_err(ctx, POWERPC_EXCP_FU, FSCR_IC_TM); \ 7315 return; \ 7316 } \ 7317 /* \ 7318 * Because tbegin always fails, the implementation is \ 7319 * simple: \ 7320 * \ 7321 * CR[0] = 0b0 || MSR[TS] || 0b0 \ 7322 * = 0b0 || 0b00 | 0b0 \ 7323 */ \ 7324 tcg_gen_movi_i32(cpu_crf[0], 0); \ 7325 } 7326 7327 #endif 7328 7329 GEN_TM_PRIV_NOOP(treclaim); 7330 GEN_TM_PRIV_NOOP(trechkpt); 7331 7332 static inline void get_fpr(TCGv_i64 dst, int regno) 7333 { 7334 tcg_gen_ld_i64(dst, cpu_env, 
fpr_offset(regno)); 7335 } 7336 7337 static inline void set_fpr(int regno, TCGv_i64 src) 7338 { 7339 tcg_gen_st_i64(src, cpu_env, fpr_offset(regno)); 7340 } 7341 7342 static inline void get_avr64(TCGv_i64 dst, int regno, bool high) 7343 { 7344 tcg_gen_ld_i64(dst, cpu_env, avr64_offset(regno, high)); 7345 } 7346 7347 static inline void set_avr64(int regno, TCGv_i64 src, bool high) 7348 { 7349 tcg_gen_st_i64(src, cpu_env, avr64_offset(regno, high)); 7350 } 7351 7352 /* 7353 * Helpers for decodetree used by !function for decoding arguments. 7354 */ 7355 static int times_2(DisasContext *ctx, int x) 7356 { 7357 return x * 2; 7358 } 7359 7360 static int times_4(DisasContext *ctx, int x) 7361 { 7362 return x * 4; 7363 } 7364 7365 static int times_16(DisasContext *ctx, int x) 7366 { 7367 return x * 16; 7368 } 7369 7370 /* 7371 * Helpers for trans_* functions to check for specific insns flags. 7372 * Use token pasting to ensure that we use the proper flag with the 7373 * proper variable. 7374 */ 7375 #define REQUIRE_INSNS_FLAGS(CTX, NAME) \ 7376 do { \ 7377 if (((CTX)->insns_flags & PPC_##NAME) == 0) { \ 7378 return false; \ 7379 } \ 7380 } while (0) 7381 7382 #define REQUIRE_INSNS_FLAGS2(CTX, NAME) \ 7383 do { \ 7384 if (((CTX)->insns_flags2 & PPC2_##NAME) == 0) { \ 7385 return false; \ 7386 } \ 7387 } while (0) 7388 7389 /* Then special-case the check for 64-bit so that we elide code for ppc32. */ 7390 #if TARGET_LONG_BITS == 32 7391 # define REQUIRE_64BIT(CTX) return false 7392 #else 7393 # define REQUIRE_64BIT(CTX) REQUIRE_INSNS_FLAGS(CTX, 64B) 7394 #endif 7395 7396 #define REQUIRE_VECTOR(CTX) \ 7397 do { \ 7398 if (unlikely(!(CTX)->altivec_enabled)) { \ 7399 gen_exception((CTX), POWERPC_EXCP_VPU); \ 7400 return true; \ 7401 } \ 7402 } while (0) 7403 7404 #define REQUIRE_VSX(CTX) \ 7405 do { \ 7406 if (unlikely(!(CTX)->vsx_enabled)) { \ 7407 gen_exception((CTX), POWERPC_EXCP_VSXU); \ 7408 return true; \ 7409 } \ 7410 } while (0) 7411 7412 #define REQUIRE_FPU(ctx) \ 7413 do { \ 7414 if (unlikely(!(ctx)->fpu_enabled)) { \ 7415 gen_exception((ctx), POWERPC_EXCP_FPU); \ 7416 return true; \ 7417 } \ 7418 } while (0) 7419 7420 /* 7421 * Helpers for implementing sets of trans_* functions. 7422 * Defer the implementation of NAME to FUNC, with optional extra arguments. 7423 */ 7424 #define TRANS(NAME, FUNC, ...) \ 7425 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7426 { return FUNC(ctx, a, __VA_ARGS__); } 7427 7428 #define TRANS64(NAME, FUNC, ...) \ 7429 static bool trans_##NAME(DisasContext *ctx, arg_##NAME *a) \ 7430 { REQUIRE_64BIT(ctx); return FUNC(ctx, a, __VA_ARGS__); } 7431 7432 /* TODO: More TRANS* helpers for extra insn_flags checks. */ 7433 7434 7435 #include "decode-insn32.c.inc" 7436 #include "decode-insn64.c.inc" 7437 #include "power8-pmu-regs.c.inc" 7438 7439 /* 7440 * Incorporate CIA into the constant when R=1. 7441 * Validate that when R=1, RA=0. 
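 *
 * For a prefixed D-form with R=1 the displacement is interpreted as
 * CIA-relative, so resolve_PLS_D() folds the current instruction address
 * into si; the R=1, RA!=0 combination is an invalid form and is rejected
 * via gen_invalid().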
7442 */ 7443 static bool resolve_PLS_D(DisasContext *ctx, arg_D *d, arg_PLS_D *a) 7444 { 7445 d->rt = a->rt; 7446 d->ra = a->ra; 7447 d->si = a->si; 7448 if (a->r) { 7449 if (unlikely(a->ra != 0)) { 7450 gen_invalid(ctx); 7451 return false; 7452 } 7453 d->si += ctx->cia; 7454 } 7455 return true; 7456 } 7457 7458 #include "translate/fixedpoint-impl.c.inc" 7459 7460 #include "translate/fp-impl.c.inc" 7461 7462 #include "translate/vmx-impl.c.inc" 7463 7464 #include "translate/vsx-impl.c.inc" 7465 7466 #include "translate/dfp-impl.c.inc" 7467 7468 #include "translate/spe-impl.c.inc" 7469 7470 #include "translate/branch-impl.c.inc" 7471 7472 /* Handles lfdp, lxsd, lxssp */ 7473 static void gen_dform39(DisasContext *ctx) 7474 { 7475 switch (ctx->opcode & 0x3) { 7476 case 0: /* lfdp */ 7477 if (ctx->insns_flags2 & PPC2_ISA205) { 7478 return gen_lfdp(ctx); 7479 } 7480 break; 7481 case 2: /* lxsd */ 7482 if (ctx->insns_flags2 & PPC2_ISA300) { 7483 return gen_lxsd(ctx); 7484 } 7485 break; 7486 case 3: /* lxssp */ 7487 if (ctx->insns_flags2 & PPC2_ISA300) { 7488 return gen_lxssp(ctx); 7489 } 7490 break; 7491 } 7492 return gen_invalid(ctx); 7493 } 7494 7495 /* handles stfdp, lxv, stxsd, stxssp lxvx */ 7496 static void gen_dform3D(DisasContext *ctx) 7497 { 7498 if ((ctx->opcode & 3) != 1) { /* DS-FORM */ 7499 switch (ctx->opcode & 0x3) { 7500 case 0: /* stfdp */ 7501 if (ctx->insns_flags2 & PPC2_ISA205) { 7502 return gen_stfdp(ctx); 7503 } 7504 break; 7505 case 2: /* stxsd */ 7506 if (ctx->insns_flags2 & PPC2_ISA300) { 7507 return gen_stxsd(ctx); 7508 } 7509 break; 7510 case 3: /* stxssp */ 7511 if (ctx->insns_flags2 & PPC2_ISA300) { 7512 return gen_stxssp(ctx); 7513 } 7514 break; 7515 } 7516 } 7517 return gen_invalid(ctx); 7518 } 7519 7520 #if defined(TARGET_PPC64) 7521 /* brd */ 7522 static void gen_brd(DisasContext *ctx) 7523 { 7524 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7525 } 7526 7527 /* brw */ 7528 static void gen_brw(DisasContext *ctx) 7529 { 7530 tcg_gen_bswap64_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rS(ctx->opcode)]); 7531 tcg_gen_rotli_i64(cpu_gpr[rA(ctx->opcode)], cpu_gpr[rA(ctx->opcode)], 32); 7532 7533 } 7534 7535 /* brh */ 7536 static void gen_brh(DisasContext *ctx) 7537 { 7538 TCGv_i64 mask = tcg_constant_i64(0x00ff00ff00ff00ffull); 7539 TCGv_i64 t1 = tcg_temp_new_i64(); 7540 TCGv_i64 t2 = tcg_temp_new_i64(); 7541 7542 tcg_gen_shri_i64(t1, cpu_gpr[rS(ctx->opcode)], 8); 7543 tcg_gen_and_i64(t2, t1, mask); 7544 tcg_gen_and_i64(t1, cpu_gpr[rS(ctx->opcode)], mask); 7545 tcg_gen_shli_i64(t1, t1, 8); 7546 tcg_gen_or_i64(cpu_gpr[rA(ctx->opcode)], t1, t2); 7547 7548 tcg_temp_free_i64(t1); 7549 tcg_temp_free_i64(t2); 7550 } 7551 #endif 7552 7553 static opcode_t opcodes[] = { 7554 #if defined(TARGET_PPC64) 7555 GEN_HANDLER_E(brd, 0x1F, 0x1B, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA310), 7556 GEN_HANDLER_E(brw, 0x1F, 0x1B, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA310), 7557 GEN_HANDLER_E(brh, 0x1F, 0x1B, 0x06, 0x0000F801, PPC_NONE, PPC2_ISA310), 7558 #endif 7559 GEN_HANDLER(invalid, 0x00, 0x00, 0x00, 0xFFFFFFFF, PPC_NONE), 7560 #if defined(TARGET_PPC64) 7561 GEN_HANDLER_E(cmpeqb, 0x1F, 0x00, 0x07, 0x00600000, PPC_NONE, PPC2_ISA300), 7562 #endif 7563 GEN_HANDLER_E(cmpb, 0x1F, 0x1C, 0x0F, 0x00000001, PPC_NONE, PPC2_ISA205), 7564 GEN_HANDLER_E(cmprb, 0x1F, 0x00, 0x06, 0x00400001, PPC_NONE, PPC2_ISA300), 7565 GEN_HANDLER(isel, 0x1F, 0x0F, 0xFF, 0x00000001, PPC_ISEL), 7566 GEN_HANDLER(addic, 0x0C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7567 GEN_HANDLER2(addic_, "addic.", 0x0D, 
0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7568 GEN_HANDLER(mulhw, 0x1F, 0x0B, 0x02, 0x00000400, PPC_INTEGER), 7569 GEN_HANDLER(mulhwu, 0x1F, 0x0B, 0x00, 0x00000400, PPC_INTEGER), 7570 GEN_HANDLER(mullw, 0x1F, 0x0B, 0x07, 0x00000000, PPC_INTEGER), 7571 GEN_HANDLER(mullwo, 0x1F, 0x0B, 0x17, 0x00000000, PPC_INTEGER), 7572 GEN_HANDLER(mulli, 0x07, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7573 #if defined(TARGET_PPC64) 7574 GEN_HANDLER(mulld, 0x1F, 0x09, 0x07, 0x00000000, PPC_64B), 7575 #endif 7576 GEN_HANDLER(neg, 0x1F, 0x08, 0x03, 0x0000F800, PPC_INTEGER), 7577 GEN_HANDLER(nego, 0x1F, 0x08, 0x13, 0x0000F800, PPC_INTEGER), 7578 GEN_HANDLER(subfic, 0x08, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7579 GEN_HANDLER2(andi_, "andi.", 0x1C, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7580 GEN_HANDLER2(andis_, "andis.", 0x1D, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7581 GEN_HANDLER(cntlzw, 0x1F, 0x1A, 0x00, 0x00000000, PPC_INTEGER), 7582 GEN_HANDLER_E(cnttzw, 0x1F, 0x1A, 0x10, 0x00000000, PPC_NONE, PPC2_ISA300), 7583 GEN_HANDLER_E(copy, 0x1F, 0x06, 0x18, 0x03C00001, PPC_NONE, PPC2_ISA300), 7584 GEN_HANDLER_E(cp_abort, 0x1F, 0x06, 0x1A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7585 GEN_HANDLER_E(paste, 0x1F, 0x06, 0x1C, 0x03C00000, PPC_NONE, PPC2_ISA300), 7586 GEN_HANDLER(or, 0x1F, 0x1C, 0x0D, 0x00000000, PPC_INTEGER), 7587 GEN_HANDLER(xor, 0x1F, 0x1C, 0x09, 0x00000000, PPC_INTEGER), 7588 GEN_HANDLER(ori, 0x18, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7589 GEN_HANDLER(oris, 0x19, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7590 GEN_HANDLER(xori, 0x1A, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7591 GEN_HANDLER(xoris, 0x1B, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7592 GEN_HANDLER(popcntb, 0x1F, 0x1A, 0x03, 0x0000F801, PPC_POPCNTB), 7593 GEN_HANDLER(popcntw, 0x1F, 0x1A, 0x0b, 0x0000F801, PPC_POPCNTWD), 7594 GEN_HANDLER_E(prtyw, 0x1F, 0x1A, 0x04, 0x0000F801, PPC_NONE, PPC2_ISA205), 7595 #if defined(TARGET_PPC64) 7596 GEN_HANDLER(popcntd, 0x1F, 0x1A, 0x0F, 0x0000F801, PPC_POPCNTWD), 7597 GEN_HANDLER(cntlzd, 0x1F, 0x1A, 0x01, 0x00000000, PPC_64B), 7598 GEN_HANDLER_E(cnttzd, 0x1F, 0x1A, 0x11, 0x00000000, PPC_NONE, PPC2_ISA300), 7599 GEN_HANDLER_E(darn, 0x1F, 0x13, 0x17, 0x001CF801, PPC_NONE, PPC2_ISA300), 7600 GEN_HANDLER_E(prtyd, 0x1F, 0x1A, 0x05, 0x0000F801, PPC_NONE, PPC2_ISA205), 7601 GEN_HANDLER_E(bpermd, 0x1F, 0x1C, 0x07, 0x00000001, PPC_NONE, PPC2_PERM_ISA206), 7602 #endif 7603 GEN_HANDLER(rlwimi, 0x14, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7604 GEN_HANDLER(rlwinm, 0x15, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7605 GEN_HANDLER(rlwnm, 0x17, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7606 GEN_HANDLER(slw, 0x1F, 0x18, 0x00, 0x00000000, PPC_INTEGER), 7607 GEN_HANDLER(sraw, 0x1F, 0x18, 0x18, 0x00000000, PPC_INTEGER), 7608 GEN_HANDLER(srawi, 0x1F, 0x18, 0x19, 0x00000000, PPC_INTEGER), 7609 GEN_HANDLER(srw, 0x1F, 0x18, 0x10, 0x00000000, PPC_INTEGER), 7610 #if defined(TARGET_PPC64) 7611 GEN_HANDLER(sld, 0x1F, 0x1B, 0x00, 0x00000000, PPC_64B), 7612 GEN_HANDLER(srad, 0x1F, 0x1A, 0x18, 0x00000000, PPC_64B), 7613 GEN_HANDLER2(sradi0, "sradi", 0x1F, 0x1A, 0x19, 0x00000000, PPC_64B), 7614 GEN_HANDLER2(sradi1, "sradi", 0x1F, 0x1B, 0x19, 0x00000000, PPC_64B), 7615 GEN_HANDLER(srd, 0x1F, 0x1B, 0x10, 0x00000000, PPC_64B), 7616 GEN_HANDLER2_E(extswsli0, "extswsli", 0x1F, 0x1A, 0x1B, 0x00000000, 7617 PPC_NONE, PPC2_ISA300), 7618 GEN_HANDLER2_E(extswsli1, "extswsli", 0x1F, 0x1B, 0x1B, 0x00000000, 7619 PPC_NONE, PPC2_ISA300), 7620 #endif 7621 /* handles lfdp, lxsd, lxssp */ 7622 GEN_HANDLER_E(dform39, 0x39, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7623 /* 
handles stfdp, stxsd, stxssp */ 7624 GEN_HANDLER_E(dform3D, 0x3D, 0xFF, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA205), 7625 GEN_HANDLER(lmw, 0x2E, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7626 GEN_HANDLER(stmw, 0x2F, 0xFF, 0xFF, 0x00000000, PPC_INTEGER), 7627 GEN_HANDLER(lswi, 0x1F, 0x15, 0x12, 0x00000001, PPC_STRING), 7628 GEN_HANDLER(lswx, 0x1F, 0x15, 0x10, 0x00000001, PPC_STRING), 7629 GEN_HANDLER(stswi, 0x1F, 0x15, 0x16, 0x00000001, PPC_STRING), 7630 GEN_HANDLER(stswx, 0x1F, 0x15, 0x14, 0x00000001, PPC_STRING), 7631 GEN_HANDLER(eieio, 0x1F, 0x16, 0x1A, 0x01FFF801, PPC_MEM_EIEIO), 7632 GEN_HANDLER(isync, 0x13, 0x16, 0x04, 0x03FFF801, PPC_MEM), 7633 GEN_HANDLER_E(lbarx, 0x1F, 0x14, 0x01, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7634 GEN_HANDLER_E(lharx, 0x1F, 0x14, 0x03, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7635 GEN_HANDLER(lwarx, 0x1F, 0x14, 0x00, 0x00000000, PPC_RES), 7636 GEN_HANDLER_E(lwat, 0x1F, 0x06, 0x12, 0x00000001, PPC_NONE, PPC2_ISA300), 7637 GEN_HANDLER_E(stwat, 0x1F, 0x06, 0x16, 0x00000001, PPC_NONE, PPC2_ISA300), 7638 GEN_HANDLER_E(stbcx_, 0x1F, 0x16, 0x15, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7639 GEN_HANDLER_E(sthcx_, 0x1F, 0x16, 0x16, 0, PPC_NONE, PPC2_ATOMIC_ISA206), 7640 GEN_HANDLER2(stwcx_, "stwcx.", 0x1F, 0x16, 0x04, 0x00000000, PPC_RES), 7641 #if defined(TARGET_PPC64) 7642 GEN_HANDLER_E(ldat, 0x1F, 0x06, 0x13, 0x00000001, PPC_NONE, PPC2_ISA300), 7643 GEN_HANDLER_E(stdat, 0x1F, 0x06, 0x17, 0x00000001, PPC_NONE, PPC2_ISA300), 7644 GEN_HANDLER(ldarx, 0x1F, 0x14, 0x02, 0x00000000, PPC_64B), 7645 GEN_HANDLER_E(lqarx, 0x1F, 0x14, 0x08, 0, PPC_NONE, PPC2_LSQ_ISA207), 7646 GEN_HANDLER2(stdcx_, "stdcx.", 0x1F, 0x16, 0x06, 0x00000000, PPC_64B), 7647 GEN_HANDLER_E(stqcx_, 0x1F, 0x16, 0x05, 0, PPC_NONE, PPC2_LSQ_ISA207), 7648 #endif 7649 GEN_HANDLER(sync, 0x1F, 0x16, 0x12, 0x039FF801, PPC_MEM_SYNC), 7650 GEN_HANDLER(wait, 0x1F, 0x1E, 0x01, 0x03FFF801, PPC_WAIT), 7651 GEN_HANDLER_E(wait, 0x1F, 0x1E, 0x00, 0x039FF801, PPC_NONE, PPC2_ISA300), 7652 GEN_HANDLER(b, 0x12, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7653 GEN_HANDLER(bc, 0x10, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7654 GEN_HANDLER(bcctr, 0x13, 0x10, 0x10, 0x00000000, PPC_FLOW), 7655 GEN_HANDLER(bclr, 0x13, 0x10, 0x00, 0x00000000, PPC_FLOW), 7656 GEN_HANDLER_E(bctar, 0x13, 0x10, 0x11, 0x0000E000, PPC_NONE, PPC2_BCTAR_ISA207), 7657 GEN_HANDLER(mcrf, 0x13, 0x00, 0xFF, 0x00000001, PPC_INTEGER), 7658 GEN_HANDLER(rfi, 0x13, 0x12, 0x01, 0x03FF8001, PPC_FLOW), 7659 #if defined(TARGET_PPC64) 7660 GEN_HANDLER(rfid, 0x13, 0x12, 0x00, 0x03FF8001, PPC_64B), 7661 #if !defined(CONFIG_USER_ONLY) 7662 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7663 GEN_HANDLER_E(scv, 0x11, 0x10, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7664 GEN_HANDLER_E(scv, 0x11, 0x00, 0xFF, 0x03FFF01E, PPC_NONE, PPC2_ISA300), 7665 GEN_HANDLER_E(rfscv, 0x13, 0x12, 0x02, 0x03FF8001, PPC_NONE, PPC2_ISA300), 7666 #endif 7667 GEN_HANDLER_E(stop, 0x13, 0x12, 0x0b, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7668 GEN_HANDLER_E(doze, 0x13, 0x12, 0x0c, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7669 GEN_HANDLER_E(nap, 0x13, 0x12, 0x0d, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7670 GEN_HANDLER_E(sleep, 0x13, 0x12, 0x0e, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7671 GEN_HANDLER_E(rvwinkle, 0x13, 0x12, 0x0f, 0x03FFF801, PPC_NONE, PPC2_PM_ISA206), 7672 GEN_HANDLER(hrfid, 0x13, 0x12, 0x08, 0x03FF8001, PPC_64H), 7673 #endif 7674 /* Top bit of opc2 corresponds with low bit of LEV, so use two handlers */ 7675 GEN_HANDLER(sc, 0x11, 0x11, 0xFF, 0x03FFF01D, PPC_FLOW), 7676 GEN_HANDLER(sc, 0x11, 0x01, 
0xFF, 0x03FFF01D, PPC_FLOW), 7677 GEN_HANDLER(tw, 0x1F, 0x04, 0x00, 0x00000001, PPC_FLOW), 7678 GEN_HANDLER(twi, 0x03, 0xFF, 0xFF, 0x00000000, PPC_FLOW), 7679 #if defined(TARGET_PPC64) 7680 GEN_HANDLER(td, 0x1F, 0x04, 0x02, 0x00000001, PPC_64B), 7681 GEN_HANDLER(tdi, 0x02, 0xFF, 0xFF, 0x00000000, PPC_64B), 7682 #endif 7683 GEN_HANDLER(mcrxr, 0x1F, 0x00, 0x10, 0x007FF801, PPC_MISC), 7684 GEN_HANDLER(mfcr, 0x1F, 0x13, 0x00, 0x00000801, PPC_MISC), 7685 GEN_HANDLER(mfmsr, 0x1F, 0x13, 0x02, 0x001FF801, PPC_MISC), 7686 GEN_HANDLER(mfspr, 0x1F, 0x13, 0x0A, 0x00000001, PPC_MISC), 7687 GEN_HANDLER(mftb, 0x1F, 0x13, 0x0B, 0x00000001, PPC_MFTB), 7688 GEN_HANDLER(mtcrf, 0x1F, 0x10, 0x04, 0x00000801, PPC_MISC), 7689 #if defined(TARGET_PPC64) 7690 GEN_HANDLER(mtmsrd, 0x1F, 0x12, 0x05, 0x001EF801, PPC_64B), 7691 GEN_HANDLER_E(setb, 0x1F, 0x00, 0x04, 0x0003F801, PPC_NONE, PPC2_ISA300), 7692 GEN_HANDLER_E(mcrxrx, 0x1F, 0x00, 0x12, 0x007FF801, PPC_NONE, PPC2_ISA300), 7693 #endif 7694 GEN_HANDLER(mtmsr, 0x1F, 0x12, 0x04, 0x001EF801, PPC_MISC), 7695 GEN_HANDLER(mtspr, 0x1F, 0x13, 0x0E, 0x00000000, PPC_MISC), 7696 GEN_HANDLER(dcbf, 0x1F, 0x16, 0x02, 0x03C00001, PPC_CACHE), 7697 GEN_HANDLER_E(dcbfep, 0x1F, 0x1F, 0x03, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7698 GEN_HANDLER(dcbi, 0x1F, 0x16, 0x0E, 0x03E00001, PPC_CACHE), 7699 GEN_HANDLER(dcbst, 0x1F, 0x16, 0x01, 0x03E00001, PPC_CACHE), 7700 GEN_HANDLER_E(dcbstep, 0x1F, 0x1F, 0x01, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7701 GEN_HANDLER(dcbt, 0x1F, 0x16, 0x08, 0x00000001, PPC_CACHE), 7702 GEN_HANDLER_E(dcbtep, 0x1F, 0x1F, 0x09, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7703 GEN_HANDLER(dcbtst, 0x1F, 0x16, 0x07, 0x00000001, PPC_CACHE), 7704 GEN_HANDLER_E(dcbtstep, 0x1F, 0x1F, 0x07, 0x00000001, PPC_NONE, PPC2_BOOKE206), 7705 GEN_HANDLER_E(dcbtls, 0x1F, 0x06, 0x05, 0x02000001, PPC_BOOKE, PPC2_BOOKE206), 7706 GEN_HANDLER(dcbz, 0x1F, 0x16, 0x1F, 0x03C00001, PPC_CACHE_DCBZ), 7707 GEN_HANDLER_E(dcbzep, 0x1F, 0x1F, 0x1F, 0x03C00001, PPC_NONE, PPC2_BOOKE206), 7708 GEN_HANDLER(dst, 0x1F, 0x16, 0x0A, 0x01800001, PPC_ALTIVEC), 7709 GEN_HANDLER(dstst, 0x1F, 0x16, 0x0B, 0x01800001, PPC_ALTIVEC), 7710 GEN_HANDLER(dss, 0x1F, 0x16, 0x19, 0x019FF801, PPC_ALTIVEC), 7711 GEN_HANDLER(icbi, 0x1F, 0x16, 0x1E, 0x03E00001, PPC_CACHE_ICBI), 7712 GEN_HANDLER_E(icbiep, 0x1F, 0x1F, 0x1E, 0x03E00001, PPC_NONE, PPC2_BOOKE206), 7713 GEN_HANDLER(dcba, 0x1F, 0x16, 0x17, 0x03E00001, PPC_CACHE_DCBA), 7714 GEN_HANDLER(mfsr, 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT), 7715 GEN_HANDLER(mfsrin, 0x1F, 0x13, 0x14, 0x001F0001, PPC_SEGMENT), 7716 GEN_HANDLER(mtsr, 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT), 7717 GEN_HANDLER(mtsrin, 0x1F, 0x12, 0x07, 0x001F0001, PPC_SEGMENT), 7718 #if defined(TARGET_PPC64) 7719 GEN_HANDLER2(mfsr_64b, "mfsr", 0x1F, 0x13, 0x12, 0x0010F801, PPC_SEGMENT_64B), 7720 GEN_HANDLER2(mfsrin_64b, "mfsrin", 0x1F, 0x13, 0x14, 0x001F0001, 7721 PPC_SEGMENT_64B), 7722 GEN_HANDLER2(mtsr_64b, "mtsr", 0x1F, 0x12, 0x06, 0x0010F801, PPC_SEGMENT_64B), 7723 GEN_HANDLER2(mtsrin_64b, "mtsrin", 0x1F, 0x12, 0x07, 0x001F0001, 7724 PPC_SEGMENT_64B), 7725 GEN_HANDLER2(slbmte, "slbmte", 0x1F, 0x12, 0x0C, 0x001F0001, PPC_SEGMENT_64B), 7726 GEN_HANDLER2(slbmfee, "slbmfee", 0x1F, 0x13, 0x1C, 0x001F0001, PPC_SEGMENT_64B), 7727 GEN_HANDLER2(slbmfev, "slbmfev", 0x1F, 0x13, 0x1A, 0x001F0001, PPC_SEGMENT_64B), 7728 GEN_HANDLER2(slbfee_, "slbfee.", 0x1F, 0x13, 0x1E, 0x001F0000, PPC_SEGMENT_64B), 7729 #endif 7730 GEN_HANDLER(tlbia, 0x1F, 0x12, 0x0B, 0x03FFFC01, PPC_MEM_TLBIA), 7731 /* 7732 * XXX Those instructions will 
need to be handled differently for 7733 * different ISA versions 7734 */ 7735 GEN_HANDLER(tlbiel, 0x1F, 0x12, 0x08, 0x001F0001, PPC_MEM_TLBIE), 7736 GEN_HANDLER(tlbie, 0x1F, 0x12, 0x09, 0x001F0001, PPC_MEM_TLBIE), 7737 GEN_HANDLER_E(tlbiel, 0x1F, 0x12, 0x08, 0x00100001, PPC_NONE, PPC2_ISA300), 7738 GEN_HANDLER_E(tlbie, 0x1F, 0x12, 0x09, 0x00100001, PPC_NONE, PPC2_ISA300), 7739 GEN_HANDLER(tlbsync, 0x1F, 0x16, 0x11, 0x03FFF801, PPC_MEM_TLBSYNC), 7740 #if defined(TARGET_PPC64) 7741 GEN_HANDLER(slbia, 0x1F, 0x12, 0x0F, 0x031FFC01, PPC_SLBI), 7742 GEN_HANDLER(slbie, 0x1F, 0x12, 0x0D, 0x03FF0001, PPC_SLBI), 7743 GEN_HANDLER_E(slbieg, 0x1F, 0x12, 0x0E, 0x001F0001, PPC_NONE, PPC2_ISA300), 7744 GEN_HANDLER_E(slbsync, 0x1F, 0x12, 0x0A, 0x03FFF801, PPC_NONE, PPC2_ISA300), 7745 #endif 7746 GEN_HANDLER(eciwx, 0x1F, 0x16, 0x0D, 0x00000001, PPC_EXTERN), 7747 GEN_HANDLER(ecowx, 0x1F, 0x16, 0x09, 0x00000001, PPC_EXTERN), 7748 GEN_HANDLER(abs, 0x1F, 0x08, 0x0B, 0x0000F800, PPC_POWER_BR), 7749 GEN_HANDLER(abso, 0x1F, 0x08, 0x1B, 0x0000F800, PPC_POWER_BR), 7750 GEN_HANDLER(clcs, 0x1F, 0x10, 0x13, 0x0000F800, PPC_POWER_BR), 7751 GEN_HANDLER(div, 0x1F, 0x0B, 0x0A, 0x00000000, PPC_POWER_BR), 7752 GEN_HANDLER(divo, 0x1F, 0x0B, 0x1A, 0x00000000, PPC_POWER_BR), 7753 GEN_HANDLER(divs, 0x1F, 0x0B, 0x0B, 0x00000000, PPC_POWER_BR), 7754 GEN_HANDLER(divso, 0x1F, 0x0B, 0x1B, 0x00000000, PPC_POWER_BR), 7755 GEN_HANDLER(doz, 0x1F, 0x08, 0x08, 0x00000000, PPC_POWER_BR), 7756 GEN_HANDLER(dozo, 0x1F, 0x08, 0x18, 0x00000000, PPC_POWER_BR), 7757 GEN_HANDLER(dozi, 0x09, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7758 GEN_HANDLER(lscbx, 0x1F, 0x15, 0x08, 0x00000000, PPC_POWER_BR), 7759 GEN_HANDLER(maskg, 0x1F, 0x1D, 0x00, 0x00000000, PPC_POWER_BR), 7760 GEN_HANDLER(maskir, 0x1F, 0x1D, 0x10, 0x00000000, PPC_POWER_BR), 7761 GEN_HANDLER(mul, 0x1F, 0x0B, 0x03, 0x00000000, PPC_POWER_BR), 7762 GEN_HANDLER(mulo, 0x1F, 0x0B, 0x13, 0x00000000, PPC_POWER_BR), 7763 GEN_HANDLER(nabs, 0x1F, 0x08, 0x0F, 0x00000000, PPC_POWER_BR), 7764 GEN_HANDLER(nabso, 0x1F, 0x08, 0x1F, 0x00000000, PPC_POWER_BR), 7765 GEN_HANDLER(rlmi, 0x16, 0xFF, 0xFF, 0x00000000, PPC_POWER_BR), 7766 GEN_HANDLER(rrib, 0x1F, 0x19, 0x10, 0x00000000, PPC_POWER_BR), 7767 GEN_HANDLER(sle, 0x1F, 0x19, 0x04, 0x00000000, PPC_POWER_BR), 7768 GEN_HANDLER(sleq, 0x1F, 0x19, 0x06, 0x00000000, PPC_POWER_BR), 7769 GEN_HANDLER(sliq, 0x1F, 0x18, 0x05, 0x00000000, PPC_POWER_BR), 7770 GEN_HANDLER(slliq, 0x1F, 0x18, 0x07, 0x00000000, PPC_POWER_BR), 7771 GEN_HANDLER(sllq, 0x1F, 0x18, 0x06, 0x00000000, PPC_POWER_BR), 7772 GEN_HANDLER(slq, 0x1F, 0x18, 0x04, 0x00000000, PPC_POWER_BR), 7773 GEN_HANDLER(sraiq, 0x1F, 0x18, 0x1D, 0x00000000, PPC_POWER_BR), 7774 GEN_HANDLER(sraq, 0x1F, 0x18, 0x1C, 0x00000000, PPC_POWER_BR), 7775 GEN_HANDLER(sre, 0x1F, 0x19, 0x14, 0x00000000, PPC_POWER_BR), 7776 GEN_HANDLER(srea, 0x1F, 0x19, 0x1C, 0x00000000, PPC_POWER_BR), 7777 GEN_HANDLER(sreq, 0x1F, 0x19, 0x16, 0x00000000, PPC_POWER_BR), 7778 GEN_HANDLER(sriq, 0x1F, 0x18, 0x15, 0x00000000, PPC_POWER_BR), 7779 GEN_HANDLER(srliq, 0x1F, 0x18, 0x17, 0x00000000, PPC_POWER_BR), 7780 GEN_HANDLER(srlq, 0x1F, 0x18, 0x16, 0x00000000, PPC_POWER_BR), 7781 GEN_HANDLER(srq, 0x1F, 0x18, 0x14, 0x00000000, PPC_POWER_BR), 7782 GEN_HANDLER(dsa, 0x1F, 0x14, 0x13, 0x03FFF801, PPC_602_SPEC), 7783 GEN_HANDLER(esa, 0x1F, 0x14, 0x12, 0x03FFF801, PPC_602_SPEC), 7784 GEN_HANDLER(mfrom, 0x1F, 0x09, 0x08, 0x03E0F801, PPC_602_SPEC), 7785 GEN_HANDLER2(tlbld_6xx, "tlbld", 0x1F, 0x12, 0x1E, 0x03FF0001, PPC_6xx_TLB), 7786 GEN_HANDLER2(tlbli_6xx, "tlbli", 
0x1F, 0x12, 0x1F, 0x03FF0001, PPC_6xx_TLB), 7787 GEN_HANDLER(clf, 0x1F, 0x16, 0x03, 0x03E00000, PPC_POWER), 7788 GEN_HANDLER(cli, 0x1F, 0x16, 0x0F, 0x03E00000, PPC_POWER), 7789 GEN_HANDLER(dclst, 0x1F, 0x16, 0x13, 0x03E00000, PPC_POWER), 7790 GEN_HANDLER(mfsri, 0x1F, 0x13, 0x13, 0x00000001, PPC_POWER), 7791 GEN_HANDLER(rac, 0x1F, 0x12, 0x19, 0x00000001, PPC_POWER), 7792 GEN_HANDLER(rfsvc, 0x13, 0x12, 0x02, 0x03FFF0001, PPC_POWER), 7793 GEN_HANDLER(lfq, 0x38, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7794 GEN_HANDLER(lfqu, 0x39, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7795 GEN_HANDLER(lfqux, 0x1F, 0x17, 0x19, 0x00000001, PPC_POWER2), 7796 GEN_HANDLER(lfqx, 0x1F, 0x17, 0x18, 0x00000001, PPC_POWER2), 7797 GEN_HANDLER(stfq, 0x3C, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7798 GEN_HANDLER(stfqu, 0x3D, 0xFF, 0xFF, 0x00000003, PPC_POWER2), 7799 GEN_HANDLER(stfqux, 0x1F, 0x17, 0x1D, 0x00000001, PPC_POWER2), 7800 GEN_HANDLER(stfqx, 0x1F, 0x17, 0x1C, 0x00000001, PPC_POWER2), 7801 GEN_HANDLER(mfapidi, 0x1F, 0x13, 0x08, 0x0000F801, PPC_MFAPIDI), 7802 GEN_HANDLER(tlbiva, 0x1F, 0x12, 0x18, 0x03FFF801, PPC_TLBIVA), 7803 GEN_HANDLER(mfdcr, 0x1F, 0x03, 0x0A, 0x00000001, PPC_DCR), 7804 GEN_HANDLER(mtdcr, 0x1F, 0x03, 0x0E, 0x00000001, PPC_DCR), 7805 GEN_HANDLER(mfdcrx, 0x1F, 0x03, 0x08, 0x00000000, PPC_DCRX), 7806 GEN_HANDLER(mtdcrx, 0x1F, 0x03, 0x0C, 0x00000000, PPC_DCRX), 7807 GEN_HANDLER(mfdcrux, 0x1F, 0x03, 0x09, 0x00000000, PPC_DCRUX), 7808 GEN_HANDLER(mtdcrux, 0x1F, 0x03, 0x0D, 0x00000000, PPC_DCRUX), 7809 GEN_HANDLER(dccci, 0x1F, 0x06, 0x0E, 0x03E00001, PPC_4xx_COMMON), 7810 GEN_HANDLER(dcread, 0x1F, 0x06, 0x0F, 0x00000001, PPC_4xx_COMMON), 7811 GEN_HANDLER2(icbt_40x, "icbt", 0x1F, 0x06, 0x08, 0x03E00001, PPC_40x_ICBT), 7812 GEN_HANDLER(iccci, 0x1F, 0x06, 0x1E, 0x00000001, PPC_4xx_COMMON), 7813 GEN_HANDLER(icread, 0x1F, 0x06, 0x1F, 0x03E00001, PPC_4xx_COMMON), 7814 GEN_HANDLER2(rfci_40x, "rfci", 0x13, 0x13, 0x01, 0x03FF8001, PPC_40x_EXCP), 7815 GEN_HANDLER_E(rfci, 0x13, 0x13, 0x01, 0x03FF8001, PPC_BOOKE, PPC2_BOOKE206), 7816 GEN_HANDLER(rfdi, 0x13, 0x07, 0x01, 0x03FF8001, PPC_RFDI), 7817 GEN_HANDLER(rfmci, 0x13, 0x06, 0x01, 0x03FF8001, PPC_RFMCI), 7818 GEN_HANDLER2(tlbre_40x, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_40x_TLB), 7819 GEN_HANDLER2(tlbsx_40x, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_40x_TLB), 7820 GEN_HANDLER2(tlbwe_40x, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_40x_TLB), 7821 GEN_HANDLER2(tlbre_440, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, PPC_BOOKE), 7822 GEN_HANDLER2(tlbsx_440, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, PPC_BOOKE), 7823 GEN_HANDLER2(tlbwe_440, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, PPC_BOOKE), 7824 GEN_HANDLER2_E(tlbre_booke206, "tlbre", 0x1F, 0x12, 0x1D, 0x00000001, 7825 PPC_NONE, PPC2_BOOKE206), 7826 GEN_HANDLER2_E(tlbsx_booke206, "tlbsx", 0x1F, 0x12, 0x1C, 0x00000000, 7827 PPC_NONE, PPC2_BOOKE206), 7828 GEN_HANDLER2_E(tlbwe_booke206, "tlbwe", 0x1F, 0x12, 0x1E, 0x00000001, 7829 PPC_NONE, PPC2_BOOKE206), 7830 GEN_HANDLER2_E(tlbivax_booke206, "tlbivax", 0x1F, 0x12, 0x18, 0x00000001, 7831 PPC_NONE, PPC2_BOOKE206), 7832 GEN_HANDLER2_E(tlbilx_booke206, "tlbilx", 0x1F, 0x12, 0x00, 0x03800001, 7833 PPC_NONE, PPC2_BOOKE206), 7834 GEN_HANDLER2_E(msgsnd, "msgsnd", 0x1F, 0x0E, 0x06, 0x03ff0001, 7835 PPC_NONE, PPC2_PRCNTL), 7836 GEN_HANDLER2_E(msgclr, "msgclr", 0x1F, 0x0E, 0x07, 0x03ff0001, 7837 PPC_NONE, PPC2_PRCNTL), 7838 GEN_HANDLER2_E(msgsync, "msgsync", 0x1F, 0x16, 0x1B, 0x00000000, 7839 PPC_NONE, PPC2_PRCNTL), 7840 GEN_HANDLER(wrtee, 0x1F, 0x03, 0x04, 0x000FFC01, PPC_WRTEE), 7841 
GEN_HANDLER(wrteei, 0x1F, 0x03, 0x05, 0x000E7C01, PPC_WRTEE),
GEN_HANDLER(dlmzb, 0x1F, 0x0E, 0x02, 0x00000000, PPC_440_SPEC),
GEN_HANDLER_E(mbar, 0x1F, 0x16, 0x1a, 0x001FF801,
              PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER(msync_4xx, 0x1F, 0x16, 0x12, 0x039FF801, PPC_BOOKE),
GEN_HANDLER2_E(icbt_440, "icbt", 0x1F, 0x16, 0x00, 0x03E00001,
               PPC_BOOKE, PPC2_BOOKE206),
GEN_HANDLER2(icbt_440, "icbt", 0x1F, 0x06, 0x08, 0x03E00001,
             PPC_440_SPEC),
GEN_HANDLER(lvsl, 0x1f, 0x06, 0x00, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(lvsr, 0x1f, 0x06, 0x01, 0x00000001, PPC_ALTIVEC),
GEN_HANDLER(mfvscr, 0x04, 0x2, 0x18, 0x001ff800, PPC_ALTIVEC),
GEN_HANDLER(mtvscr, 0x04, 0x2, 0x19, 0x03ff0000, PPC_ALTIVEC),
GEN_HANDLER(vmladduhm, 0x04, 0x11, 0xFF, 0x00000000, PPC_ALTIVEC),
#if defined(TARGET_PPC64)
GEN_HANDLER_E(maddhd_maddhdu, 0x04, 0x18, 0xFF, 0x00000000, PPC_NONE,
              PPC2_ISA300),
GEN_HANDLER_E(maddld, 0x04, 0x19, 0xFF, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_HANDLER2_E(msgsndp, "msgsndp", 0x1F, 0x0E, 0x04, 0x03ff0001,
               PPC_NONE, PPC2_ISA207S),
GEN_HANDLER2_E(msgclrp, "msgclrp", 0x1F, 0x0E, 0x05, 0x03ff0001,
               PPC_NONE, PPC2_ISA207S),
#endif

#undef GEN_INT_ARITH_ADD
#undef GEN_INT_ARITH_ADD_CONST
#define GEN_INT_ARITH_ADD(name, opc3, add_ca, compute_ca, compute_ov)         \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_ADD_CONST(name, opc3, const_val,                        \
                                add_ca, compute_ca, compute_ov)               \
GEN_HANDLER(name, 0x1F, 0x0A, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0)
GEN_INT_ARITH_ADD(addo, 0x18, 0, 0, 1)
GEN_INT_ARITH_ADD(addc, 0x00, 0, 1, 0)
GEN_INT_ARITH_ADD(addco, 0x10, 0, 1, 1)
GEN_INT_ARITH_ADD(adde, 0x04, 1, 1, 0)
GEN_INT_ARITH_ADD(addeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_ADD_CONST(addme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addmeo, 0x17, -1LL, 1, 1, 1)
GEN_HANDLER_E(addex, 0x1F, 0x0A, 0x05, 0x00000000, PPC_NONE, PPC2_ISA300),
GEN_INT_ARITH_ADD_CONST(addze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_ADD_CONST(addzeo, 0x16, 0, 1, 1, 1)
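
/*
 * Reader's note: the #undef/#define blocks in this part of the table
 * re-define the GEN_* helper macro names so that, from here on, they
 * expand to opcode-table entries rather than anything else.  For example,
 * GEN_INT_ARITH_ADD(add, 0x08, 0, 0, 0) above expands, per the #define
 * just before it, to
 *   GEN_HANDLER(add, 0x1F, 0x0A, 0x08, 0x00000000, PPC_INTEGER),
 * i.e. a single table entry for the add translator; the extra macro
 * arguments are simply ignored in this expansion.
 */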

#undef GEN_INT_ARITH_DIVW
#define GEN_INT_ARITH_DIVW(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x0B, opc3, 0x00000000, PPC_INTEGER)
GEN_INT_ARITH_DIVW(divwu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVW(divwuo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVW(divw, 0x0F, 1, 0),
GEN_INT_ARITH_DIVW(divwo, 0x1F, 1, 1),
GEN_HANDLER_E(divwe, 0x1F, 0x0B, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweo, 0x1F, 0x0B, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweu, 0x1F, 0x0B, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divweuo, 0x1F, 0x0B, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsw, 0x1F, 0x0B, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(moduw, 0x1F, 0x0B, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#if defined(TARGET_PPC64)
#undef GEN_INT_ARITH_DIVD
#define GEN_INT_ARITH_DIVD(name, opc3, sign, compute_ov)                      \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_DIVD(divdu, 0x0E, 0, 0),
GEN_INT_ARITH_DIVD(divduo, 0x1E, 0, 1),
GEN_INT_ARITH_DIVD(divd, 0x0F, 1, 0),
GEN_INT_ARITH_DIVD(divdo, 0x1F, 1, 1),

GEN_HANDLER_E(divdeu, 0x1F, 0x09, 0x0C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeuo, 0x1F, 0x09, 0x1C, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divde, 0x1F, 0x09, 0x0D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(divdeo, 0x1F, 0x09, 0x1D, 0, PPC_NONE, PPC2_DIVE_ISA206),
GEN_HANDLER_E(modsd, 0x1F, 0x09, 0x18, 0x00000001, PPC_NONE, PPC2_ISA300),
GEN_HANDLER_E(modud, 0x1F, 0x09, 0x08, 0x00000001, PPC_NONE, PPC2_ISA300),

#undef GEN_INT_ARITH_MUL_HELPER
#define GEN_INT_ARITH_MUL_HELPER(name, opc3)                                  \
GEN_HANDLER(name, 0x1F, 0x09, opc3, 0x00000000, PPC_64B)
GEN_INT_ARITH_MUL_HELPER(mulhdu, 0x00),
GEN_INT_ARITH_MUL_HELPER(mulhd, 0x02),
GEN_INT_ARITH_MUL_HELPER(mulldo, 0x17),
#endif

#undef GEN_INT_ARITH_SUBF
#undef GEN_INT_ARITH_SUBF_CONST
#define GEN_INT_ARITH_SUBF(name, opc3, add_ca, compute_ca, compute_ov)        \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x00000000, PPC_INTEGER),
#define GEN_INT_ARITH_SUBF_CONST(name, opc3, const_val,                       \
                                 add_ca, compute_ca, compute_ov)              \
GEN_HANDLER(name, 0x1F, 0x08, opc3, 0x0000F800, PPC_INTEGER),
GEN_INT_ARITH_SUBF(subf, 0x01, 0, 0, 0)
GEN_INT_ARITH_SUBF(subfo, 0x11, 0, 0, 1)
GEN_INT_ARITH_SUBF(subfc, 0x00, 0, 1, 0)
GEN_INT_ARITH_SUBF(subfco, 0x10, 0, 1, 1)
GEN_INT_ARITH_SUBF(subfe, 0x04, 1, 1, 0)
GEN_INT_ARITH_SUBF(subfeo, 0x14, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfme, 0x07, -1LL, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfmeo, 0x17, -1LL, 1, 1, 1)
GEN_INT_ARITH_SUBF_CONST(subfze, 0x06, 0, 1, 1, 0)
GEN_INT_ARITH_SUBF_CONST(subfzeo, 0x16, 0, 1, 1, 1)

#undef GEN_LOGICAL1
#undef GEN_LOGICAL2
#define GEN_LOGICAL2(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1C, opc, 0x00000000, type)
#define GEN_LOGICAL1(name, tcg_op, opc, type)                                 \
GEN_HANDLER(name, 0x1F, 0x1A, opc, 0x00000000, type)
GEN_LOGICAL2(and, tcg_gen_and_tl, 0x00, PPC_INTEGER),
GEN_LOGICAL2(andc, tcg_gen_andc_tl, 0x01, PPC_INTEGER),
GEN_LOGICAL2(eqv, tcg_gen_eqv_tl, 0x08, PPC_INTEGER),
GEN_LOGICAL1(extsb, tcg_gen_ext8s_tl, 0x1D, PPC_INTEGER),
GEN_LOGICAL1(extsh, tcg_gen_ext16s_tl, 0x1C, PPC_INTEGER),
GEN_LOGICAL2(nand, tcg_gen_nand_tl, 0x0E, PPC_INTEGER),
GEN_LOGICAL2(nor, tcg_gen_nor_tl, 0x03, PPC_INTEGER),
GEN_LOGICAL2(orc, tcg_gen_orc_tl, 0x0C, PPC_INTEGER),
#if defined(TARGET_PPC64)
GEN_LOGICAL1(extsw, tcg_gen_ext32s_tl, 0x1E, PPC_64B),
#endif

#if defined(TARGET_PPC64)
#undef GEN_PPC64_R2
#undef GEN_PPC64_R4
#define GEN_PPC64_R2(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B)
#define GEN_PPC64_R4(name, opc1, opc2)                                        \
GEN_HANDLER2(name##0, stringify(name), opc1, opc2, 0xFF, 0x00000000, PPC_64B),\
GEN_HANDLER2(name##1, stringify(name), opc1, opc2 | 0x01, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##2, stringify(name), opc1, opc2 | 0x10, 0xFF, 0x00000000,   \
             PPC_64B),                                                        \
GEN_HANDLER2(name##3, stringify(name), opc1, opc2 | 0x11, 0xFF, 0x00000000,   \
             PPC_64B)
GEN_PPC64_R4(rldicl, 0x1E, 0x00),
GEN_PPC64_R4(rldicr, 0x1E, 0x02),
GEN_PPC64_R4(rldic, 0x1E, 0x04),
GEN_PPC64_R2(rldcl, 0x1E, 0x08),
GEN_PPC64_R2(rldcr, 0x1E, 0x09),
GEN_PPC64_R4(rldimi, 0x1E, 0x06),
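
/*
 * Reader's note: per the macro definitions just above, each GEN_PPC64_R4
 * use stands for four table entries whose opc2 differs only in the low
 * (| 0x01) and high (| 0x10) bits, while GEN_PPC64_R2 stands for the
 * 0x00/0x10 pair only.  GEN_PPC64_R4(rldicl, 0x1E, 0x00), for instance,
 * registers the rldicl variants at opc2 0x00, 0x01, 0x10 and 0x11.
 */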
#endif

#undef GEN_LDX_E
#define GEN_LDX_E(name, ldop, opc2, opc3, type, type2, chk)                   \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000001, type, type2),

#if defined(TARGET_PPC64)
GEN_LDX_E(ldbr, ld64ur_i64, 0x14, 0x10, PPC_NONE, PPC2_DBRX, CHK_NONE)

/* HV/P7 and later only */
GEN_LDX_HVRM(ldcix, ld64_i64, 0x15, 0x1b, PPC_CILDST)
GEN_LDX_HVRM(lwzcix, ld32u, 0x15, 0x18, PPC_CILDST)
GEN_LDX_HVRM(lhzcix, ld16u, 0x15, 0x19, PPC_CILDST)
GEN_LDX_HVRM(lbzcix, ld8u, 0x15, 0x1a, PPC_CILDST)
#endif
GEN_LDX(lhbr, ld16ur, 0x16, 0x18, PPC_INTEGER)
GEN_LDX(lwbr, ld32ur, 0x16, 0x10, PPC_INTEGER)

/* External PID based load */
#undef GEN_LDEPX
#define GEN_LDEPX(name, ldop, opc2, opc3)                                     \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_LDEPX(lb, DEF_MEMOP(MO_UB), 0x1F, 0x02)
GEN_LDEPX(lh, DEF_MEMOP(MO_UW), 0x1F, 0x08)
GEN_LDEPX(lw, DEF_MEMOP(MO_UL), 0x1F, 0x00)
#if defined(TARGET_PPC64)
GEN_LDEPX(ld, DEF_MEMOP(MO_UQ), 0x1D, 0x00)
#endif

#undef GEN_STX_E
#define GEN_STX_E(name, stop, opc2, opc3, type, type2, chk)                   \
GEN_HANDLER_E(name##x, 0x1F, opc2, opc3, 0x00000000, type, type2),

#if defined(TARGET_PPC64)
GEN_STX_E(stdbr, st64r_i64, 0x14, 0x14, PPC_NONE, PPC2_DBRX, CHK_NONE)
GEN_STX_HVRM(stdcix, st64_i64, 0x15, 0x1f, PPC_CILDST)
GEN_STX_HVRM(stwcix, st32, 0x15, 0x1c, PPC_CILDST)
GEN_STX_HVRM(sthcix, st16, 0x15, 0x1d, PPC_CILDST)
GEN_STX_HVRM(stbcix, st8, 0x15, 0x1e, PPC_CILDST)
#endif
GEN_STX(sthbr, st16r, 0x16, 0x1C, PPC_INTEGER)
GEN_STX(stwbr, st32r, 0x16, 0x14, PPC_INTEGER)

#undef GEN_STEPX
#define GEN_STEPX(name, ldop, opc2, opc3)                                     \
GEN_HANDLER_E(name##epx, 0x1F, opc2, opc3,                                    \
              0x00000001, PPC_NONE, PPC2_BOOKE206),

GEN_STEPX(stb, DEF_MEMOP(MO_UB), 0x1F, 0x06)
GEN_STEPX(sth, DEF_MEMOP(MO_UW), 0x1F, 0x0C)
GEN_STEPX(stw, DEF_MEMOP(MO_UL), 0x1F, 0x04)
#if defined(TARGET_PPC64)
GEN_STEPX(std, DEF_MEMOP(MO_UQ), 0x1D, 0x04)
#endif

#undef GEN_CRLOGIC
#define GEN_CRLOGIC(name, tcg_op, opc)                                        \
GEN_HANDLER(name, 0x13, 0x01, opc, 0x00000001, PPC_INTEGER)
GEN_CRLOGIC(crand, tcg_gen_and_i32, 0x08),
GEN_CRLOGIC(crandc, tcg_gen_andc_i32, 0x04),
GEN_CRLOGIC(creqv, tcg_gen_eqv_i32, 0x09),
GEN_CRLOGIC(crnand, tcg_gen_nand_i32, 0x07),
GEN_CRLOGIC(crnor, tcg_gen_nor_i32, 0x01),
GEN_CRLOGIC(cror, tcg_gen_or_i32, 0x0E),
GEN_CRLOGIC(crorc, tcg_gen_orc_i32, 0x0D),
GEN_CRLOGIC(crxor, tcg_gen_xor_i32, 0x06),

#undef GEN_MAC_HANDLER
#define GEN_MAC_HANDLER(name, opc2, opc3)                                     \
GEN_HANDLER(name, 0x04, opc2, opc3, 0x00000000, PPC_405_MAC)
GEN_MAC_HANDLER(macchw, 0x0C, 0x05),
GEN_MAC_HANDLER(macchwo, 0x0C, 0x15),
GEN_MAC_HANDLER(macchws, 0x0C, 0x07),
GEN_MAC_HANDLER(macchwso, 0x0C, 0x17),
GEN_MAC_HANDLER(macchwsu, 0x0C, 0x06),
GEN_MAC_HANDLER(macchwsuo, 0x0C, 0x16),
GEN_MAC_HANDLER(macchwu, 0x0C, 0x04),
GEN_MAC_HANDLER(macchwuo, 0x0C, 0x14),
GEN_MAC_HANDLER(machhw, 0x0C, 0x01),
GEN_MAC_HANDLER(machhwo, 0x0C, 0x11),
GEN_MAC_HANDLER(machhws, 0x0C, 0x03),
GEN_MAC_HANDLER(machhwso, 0x0C, 0x13),
GEN_MAC_HANDLER(machhwsu, 0x0C, 0x02),
GEN_MAC_HANDLER(machhwsuo, 0x0C, 0x12),
GEN_MAC_HANDLER(machhwu, 0x0C, 0x00),
GEN_MAC_HANDLER(machhwuo, 0x0C, 0x10),
GEN_MAC_HANDLER(maclhw, 0x0C, 0x0D),
GEN_MAC_HANDLER(maclhwo, 0x0C, 0x1D),
GEN_MAC_HANDLER(maclhws, 0x0C, 0x0F),
GEN_MAC_HANDLER(maclhwso, 0x0C, 0x1F),
GEN_MAC_HANDLER(maclhwu, 0x0C, 0x0C),
GEN_MAC_HANDLER(maclhwuo, 0x0C, 0x1C),
GEN_MAC_HANDLER(maclhwsu, 0x0C, 0x0E),
GEN_MAC_HANDLER(maclhwsuo, 0x0C, 0x1E),
GEN_MAC_HANDLER(nmacchw, 0x0E, 0x05),
GEN_MAC_HANDLER(nmacchwo, 0x0E, 0x15),
GEN_MAC_HANDLER(nmacchws, 0x0E, 0x07),
GEN_MAC_HANDLER(nmacchwso, 0x0E, 0x17),
GEN_MAC_HANDLER(nmachhw, 0x0E, 0x01),
GEN_MAC_HANDLER(nmachhwo, 0x0E, 0x11),
GEN_MAC_HANDLER(nmachhws, 0x0E, 0x03),
GEN_MAC_HANDLER(nmachhwso, 0x0E, 0x13),
GEN_MAC_HANDLER(nmaclhw, 0x0E, 0x0D),
GEN_MAC_HANDLER(nmaclhwo, 0x0E, 0x1D),
GEN_MAC_HANDLER(nmaclhws, 0x0E, 0x0F),
GEN_MAC_HANDLER(nmaclhwso, 0x0E, 0x1F),
GEN_MAC_HANDLER(mulchw, 0x08, 0x05),
GEN_MAC_HANDLER(mulchwu, 0x08, 0x04),
GEN_MAC_HANDLER(mulhhw, 0x08, 0x01),
GEN_MAC_HANDLER(mulhhwu, 0x08, 0x00),
GEN_MAC_HANDLER(mullhw, 0x08, 0x0D),
GEN_MAC_HANDLER(mullhwu, 0x08, 0x0C),

GEN_HANDLER2_E(tbegin, "tbegin", 0x1F, 0x0E, 0x14, 0x01DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tend, "tend", 0x1F, 0x0E, 0x15, 0x01FFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabort, "tabort", 0x1F, 0x0E, 0x1C, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwc, "tabortwc", 0x1F, 0x0E, 0x18, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortwci, "tabortwci", 0x1F, 0x0E, 0x1A, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdc, "tabortdc", 0x1F, 0x0E, 0x19, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tabortdci, "tabortdci", 0x1F, 0x0E, 0x1B, 0x00000000, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tsr, "tsr", 0x1F, 0x0E, 0x17, 0x03DFF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(tcheck, "tcheck", 0x1F, 0x0E, 0x16, 0x007FF800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(treclaim, "treclaim", 0x1F, 0x0E, 0x1D, 0x03E0F800, \
               PPC_NONE, PPC2_TM),
GEN_HANDLER2_E(trechkpt, "trechkpt", 0x1F, 0x0E, 0x1F, 0x03FFF800, \
               PPC_NONE, PPC2_TM),

#include "translate/fp-ops.c.inc"

#include "translate/vmx-ops.c.inc"

#include "translate/vsx-ops.c.inc"

#include "translate/spe-ops.c.inc"
};

/*****************************************************************************/
/* Opcode types */
enum {
    PPC_DIRECT = 0,   /* Opcode routine        */
    PPC_INDIRECT = 1, /* Indirect opcode table */
};

#define PPC_OPCODE_MASK 0x3

static inline int is_indirect_opcode(void *handler)
{
    return ((uintptr_t)handler & PPC_OPCODE_MASK) == PPC_INDIRECT;
}

static inline opc_handler_t **ind_table(void *handler)
{
    return (opc_handler_t **)((uintptr_t)handler & ~PPC_OPCODE_MASK);
}
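
/*
 * Illustrative sketch, not used by the translator: a table slot holds
 * either a direct handler or a pointer to a sub-table, distinguished by
 * the low bits of the pointer value.  The helper below (the name is made
 * up for illustration) shows the tag/untag round trip that
 * create_new_table() and ind_table() rely on; like the real code, it
 * assumes handler structures and sub-tables are at least 4-byte aligned
 * so that the two tag bits are free.
 */
static inline bool opc_tag_round_trip(opc_handler_t **sub_table)
{
    /* Tag the sub-table pointer the same way create_new_table() does. */
    opc_handler_t *tagged =
        (opc_handler_t *)((uintptr_t)sub_table | PPC_INDIRECT);

    /* The tag is visible... */
    if (!is_indirect_opcode(tagged)) {
        return false;
    }
    /* ...and stripping it recovers the original sub-table pointer. */
    return ind_table(tagged) == sub_table;
}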

/* Instruction table creation */
/* Opcodes tables creation */
static void fill_new_table(opc_handler_t **table, int len)
{
    int i;

    for (i = 0; i < len; i++) {
        table[i] = &invalid_handler;
    }
}

static int create_new_table(opc_handler_t **table, unsigned char idx)
{
    opc_handler_t **tmp;

    tmp = g_new(opc_handler_t *, PPC_CPU_INDIRECT_OPCODES_LEN);
    fill_new_table(tmp, PPC_CPU_INDIRECT_OPCODES_LEN);
    table[idx] = (opc_handler_t *)((uintptr_t)tmp | PPC_INDIRECT);

    return 0;
}

static int insert_in_table(opc_handler_t **table, unsigned char idx,
                           opc_handler_t *handler)
{
    if (table[idx] != &invalid_handler) {
        return -1;
    }
    table[idx] = handler;

    return 0;
}

static int register_direct_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx, opc_handler_t *handler)
{
    if (insert_in_table(ppc_opcodes, idx, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in main "
               "opcode table\n", idx);
        return -1;
    }

    return 0;
}

static int register_ind_in_table(opc_handler_t **table,
                                 unsigned char idx1, unsigned char idx2,
                                 opc_handler_t *handler)
{
    if (table[idx1] == &invalid_handler) {
        if (create_new_table(table, idx1) < 0) {
            printf("*** ERROR: unable to create indirect table "
                   "idx=%02x\n", idx1);
            return -1;
        }
    } else {
        if (!is_indirect_opcode(table[idx1])) {
            printf("*** ERROR: idx %02x already assigned to a direct "
                   "opcode\n", idx1);
            return -1;
        }
    }
    if (handler != NULL &&
        insert_in_table(ind_table(table[idx1]), idx2, handler) < 0) {
        printf("*** ERROR: opcode %02x already assigned in "
               "opcode table %02x\n", idx2, idx1);
        return -1;
    }

    return 0;
}

static int register_ind_insn(opc_handler_t **ppc_opcodes,
                             unsigned char idx1, unsigned char idx2,
                             opc_handler_t *handler)
{
    return register_ind_in_table(ppc_opcodes, idx1, idx2, handler);
}

static int register_dblind_insn(opc_handler_t **ppc_opcodes,
                                unsigned char idx1, unsigned char idx2,
                                unsigned char idx3, opc_handler_t *handler)
{
    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    if (register_ind_in_table(ind_table(ppc_opcodes[idx1]), idx2, idx3,
                              handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }

    return 0;
}

static int register_trplind_insn(opc_handler_t **ppc_opcodes,
                                 unsigned char idx1, unsigned char idx2,
                                 unsigned char idx3, unsigned char idx4,
                                 opc_handler_t *handler)
{
    opc_handler_t **table;

    if (register_ind_in_table(ppc_opcodes, idx1, idx2, NULL) < 0) {
        printf("*** ERROR: unable to join indirect table idx "
               "[%02x-%02x]\n", idx1, idx2);
        return -1;
    }
    table = ind_table(ppc_opcodes[idx1]);
    if (register_ind_in_table(table, idx2, idx3, NULL) < 0) {
        printf("*** ERROR: unable to join 2nd-level indirect table idx "
               "[%02x-%02x-%02x]\n", idx1, idx2, idx3);
        return -1;
    }
    table = ind_table(table[idx2]);
    if (register_ind_in_table(table, idx3, idx4, handler) < 0) {
        printf("*** ERROR: unable to insert opcode "
               "[%02x-%02x-%02x-%02x]\n", idx1, idx2, idx3, idx4);
        return -1;
    }
    return 0;
}

static int register_insn(opc_handler_t **ppc_opcodes, opcode_t *insn)
{
    if (insn->opc2 != 0xFF) {
        if (insn->opc3 != 0xFF) {
            if (insn->opc4 != 0xFF) {
                if (register_trplind_insn(ppc_opcodes, insn->opc1, insn->opc2,
                                          insn->opc3, insn->opc4,
                                          &insn->handler) < 0) {
                    return -1;
                }
            } else {
                if (register_dblind_insn(ppc_opcodes, insn->opc1, insn->opc2,
                                         insn->opc3, &insn->handler) < 0) {
                    return -1;
                }
            }
        } else {
            if (register_ind_insn(ppc_opcodes, insn->opc1,
                                  insn->opc2, &insn->handler) < 0) {
                return -1;
            }
        }
    } else {
        if (register_direct_insn(ppc_opcodes, insn->opc1, &insn->handler) < 0) {
            return -1;
        }
    }

    return 0;
}
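
/*
 * Illustrative sketch, not used by the translator: register_insn() above
 * treats 0xFF in opc2/opc3/opc4 as a "no further sub-opcode" sentinel, as
 * the dozi and rlmi entries in the table show.  The helper below (a made-up
 * name, for illustration only) merely reports how many table levels an
 * entry ends up occupying under that convention.
 */
static inline int opcode_table_depth(const opcode_t *insn)
{
    if (insn->opc2 == 0xFF) {
        return 1;   /* direct: a single top-level slot */
    }
    if (insn->opc3 == 0xFF) {
        return 2;   /* opc1 -> opc2 */
    }
    if (insn->opc4 == 0xFF) {
        return 3;   /* opc1 -> opc2 -> opc3 */
    }
    return 4;       /* opc1 -> opc2 -> opc3 -> opc4 */
}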

static int test_opcode_table(opc_handler_t **table, int len)
{
    int i, count, tmp;

    for (i = 0, count = 0; i < len; i++) {
        /* Consistency fixup */
        if (table[i] == NULL) {
            table[i] = &invalid_handler;
        }
        if (table[i] != &invalid_handler) {
            if (is_indirect_opcode(table[i])) {
                tmp = test_opcode_table(ind_table(table[i]),
                                        PPC_CPU_INDIRECT_OPCODES_LEN);
                if (tmp == 0) {
                    /* Strip the PPC_INDIRECT tag before freeing the
                       g_new()-allocated sub-table. */
                    g_free(ind_table(table[i]));
                    table[i] = &invalid_handler;
                } else {
                    count++;
                }
            } else {
                count++;
            }
        }
    }

    return count;
}

static void fix_opcode_tables(opc_handler_t **ppc_opcodes)
{
    if (test_opcode_table(ppc_opcodes, PPC_CPU_OPCODES_LEN) == 0) {
        printf("*** WARNING: no opcode defined !\n");
    }
}

/*****************************************************************************/
void create_ppc_opcodes(PowerPCCPU *cpu, Error **errp)
{
    PowerPCCPUClass *pcc = POWERPC_CPU_GET_CLASS(cpu);
    opcode_t *opc;

    fill_new_table(cpu->opcodes, PPC_CPU_OPCODES_LEN);
    for (opc = opcodes; opc < &opcodes[ARRAY_SIZE(opcodes)]; opc++) {
        if (((opc->handler.type & pcc->insns_flags) != 0) ||
            ((opc->handler.type2 & pcc->insns_flags2) != 0)) {
            if (register_insn(cpu->opcodes, opc) < 0) {
                error_setg(errp, "ERROR initializing PowerPC instruction "
                           "0x%02x 0x%02x 0x%02x", opc->opc1, opc->opc2,
                           opc->opc3);
                return;
            }
        }
    }
    fix_opcode_tables(cpu->opcodes);
    fflush(stdout);
    fflush(stderr);
}
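
/*
 * Illustrative sketch, not used by the translator: create_ppc_opcodes()
 * above registers an entry when either instruction-class mask intersects
 * the CPU's advertised flags.  The ISA300-only entries in the table, for
 * example, have handler.type == PPC_NONE, so only the type2/insns_flags2
 * test can admit them.  The helper name below is made up for illustration.
 */
static inline bool opcode_wanted_by_cpu(const opcode_t *opc,
                                        const PowerPCCPUClass *pcc)
{
    return (opc->handler.type & pcc->insns_flags) != 0 ||
           (opc->handler.type2 & pcc->insns_flags2) != 0;
}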

void destroy_ppc_opcodes(PowerPCCPU *cpu)
{
    opc_handler_t **table, **table_2;
    int i, j, k;

    for (i = 0; i < PPC_CPU_OPCODES_LEN; i++) {
        if (cpu->opcodes[i] == &invalid_handler) {
            continue;
        }
        if (is_indirect_opcode(cpu->opcodes[i])) {
            table = ind_table(cpu->opcodes[i]);
            for (j = 0; j < PPC_CPU_INDIRECT_OPCODES_LEN; j++) {
                if (table[j] == &invalid_handler) {
                    continue;
                }
                if (is_indirect_opcode(table[j])) {
                    table_2 = ind_table(table[j]);
                    for (k = 0; k < PPC_CPU_INDIRECT_OPCODES_LEN; k++) {
                        if (table_2[k] != &invalid_handler &&
                            is_indirect_opcode(table_2[k])) {
                            g_free((opc_handler_t *)((uintptr_t)table_2[k] &
                                                     ~PPC_INDIRECT));
                        }
                    }
                    g_free((opc_handler_t *)((uintptr_t)table[j] &
                                             ~PPC_INDIRECT));
                }
            }
            g_free((opc_handler_t *)((uintptr_t)cpu->opcodes[i] &
                                     ~PPC_INDIRECT));
        }
    }
}

int ppc_fixup_cpu(PowerPCCPU *cpu)
{
    CPUPPCState *env = &cpu->env;

    /*
     * TCG doesn't (yet) emulate some groups of instructions that are
     * implemented on some otherwise supported CPUs (e.g. VSX and
     * decimal floating point instructions on POWER7).  We remove
     * unsupported instruction groups from the cpu state's instruction
     * masks and hope the guest can cope.  For at least the pseries
     * machine, the unavailability of these instructions can be
     * advertised to the guest via the device tree.
     */
    if ((env->insns_flags & ~PPC_TCG_INSNS)
        || (env->insns_flags2 & ~PPC_TCG_INSNS2)) {
        warn_report("Disabling some instructions which are not "
                    "emulated by TCG (0x%" PRIx64 ", 0x%" PRIx64 ")",
                    env->insns_flags & ~PPC_TCG_INSNS,
                    env->insns_flags2 & ~PPC_TCG_INSNS2);
    }
    env->insns_flags &= PPC_TCG_INSNS;
    env->insns_flags2 &= PPC_TCG_INSNS2;
    return 0;
}

static bool decode_legacy(PowerPCCPU *cpu, DisasContext *ctx, uint32_t insn)
{
    opc_handler_t **table, *handler;
    uint32_t inval;

    ctx->opcode = insn;

    LOG_DISAS("translate opcode %08x (%02x %02x %02x %02x) (%s)\n",
              insn, opc1(insn), opc2(insn), opc3(insn), opc4(insn),
              ctx->le_mode ? "little" : "big");

    table = cpu->opcodes;
    handler = table[opc1(insn)];
    if (is_indirect_opcode(handler)) {
        table = ind_table(handler);
        handler = table[opc2(insn)];
        if (is_indirect_opcode(handler)) {
            table = ind_table(handler);
            handler = table[opc3(insn)];
            if (is_indirect_opcode(handler)) {
                table = ind_table(handler);
                handler = table[opc4(insn)];
            }
        }
    }

    /* Is opcode *REALLY* valid ? */
    if (unlikely(handler->handler == &gen_invalid)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid/unsupported opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n",
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    if (unlikely(handler->type & (PPC_SPE | PPC_SPE_SINGLE | PPC_SPE_DOUBLE)
                 && Rc(insn))) {
        inval = handler->inval2;
    } else {
        inval = handler->inval1;
    }

    if (unlikely((insn & inval) != 0)) {
        qemu_log_mask(LOG_GUEST_ERROR, "invalid bits: %08x for opcode: "
                      "%02x - %02x - %02x - %02x (%08x) "
                      TARGET_FMT_lx "\n", insn & inval,
                      opc1(insn), opc2(insn), opc3(insn), opc4(insn),
                      insn, ctx->cia);
        return false;
    }

    handler->handler(ctx);
    return true;
}
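
/*
 * Illustrative sketch, not used by the translator: decode_legacy() above
 * rejects an instruction when it sets any bit of the handler's "inval"
 * mask, i.e. the encoding bits the table entry declares as must-be-zero.
 * The addme entries, for instance, use an inval mask of 0x0000F800, the
 * operand field addme leaves unused.  The helper name below is made up
 * for illustration.
 */
static inline bool ppc_insn_sets_reserved_bits(uint32_t insn, uint32_t inval)
{
    return (insn & inval) != 0;
}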

static void ppc_tr_init_disas_context(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    CPUPPCState *env = cs->env_ptr;
    uint32_t hflags = ctx->base.tb->flags;

    ctx->spr_cb = env->spr_cb;
    ctx->pr = (hflags >> HFLAGS_PR) & 1;
    ctx->mem_idx = (hflags >> HFLAGS_DMMU_IDX) & 7;
    ctx->dr = (hflags >> HFLAGS_DR) & 1;
    ctx->hv = (hflags >> HFLAGS_HV) & 1;
    ctx->insns_flags = env->insns_flags;
    ctx->insns_flags2 = env->insns_flags2;
    ctx->access_type = -1;
    ctx->need_access_type = !mmu_is_64bit(env->mmu_model);
    ctx->le_mode = (hflags >> HFLAGS_LE) & 1;
    ctx->default_tcg_memop_mask = ctx->le_mode ? MO_LE : MO_BE;
    ctx->flags = env->flags;
#if defined(TARGET_PPC64)
    ctx->sf_mode = (hflags >> HFLAGS_64) & 1;
    ctx->has_cfar = !!(env->flags & POWERPC_FLAG_CFAR);
#endif
    ctx->lazy_tlb_flush = env->mmu_model == POWERPC_MMU_32B
        || env->mmu_model == POWERPC_MMU_601
        || env->mmu_model & POWERPC_MMU_64;

    ctx->fpu_enabled = (hflags >> HFLAGS_FP) & 1;
    ctx->spe_enabled = (hflags >> HFLAGS_SPE) & 1;
    ctx->altivec_enabled = (hflags >> HFLAGS_VR) & 1;
    ctx->vsx_enabled = (hflags >> HFLAGS_VSX) & 1;
    ctx->tm_enabled = (hflags >> HFLAGS_TM) & 1;
    ctx->gtse = (hflags >> HFLAGS_GTSE) & 1;
    ctx->hr = (hflags >> HFLAGS_HR) & 1;
    ctx->mmcr0_pmcc0 = (hflags >> HFLAGS_PMCC0) & 1;
    ctx->mmcr0_pmcc1 = (hflags >> HFLAGS_PMCC1) & 1;
    ctx->pmu_insn_cnt = (hflags >> HFLAGS_INSN_CNT) & 1;

    ctx->singlestep_enabled = 0;
    if ((hflags >> HFLAGS_SE) & 1) {
        ctx->singlestep_enabled |= CPU_SINGLE_STEP;
        ctx->base.max_insns = 1;
    }
    if ((hflags >> HFLAGS_BE) & 1) {
        ctx->singlestep_enabled |= CPU_BRANCH_STEP;
    }
}

static void ppc_tr_tb_start(DisasContextBase *db, CPUState *cs)
{
}

static void ppc_tr_insn_start(DisasContextBase *dcbase, CPUState *cs)
{
    tcg_gen_insn_start(dcbase->pc_next);
}

static bool is_prefix_insn(DisasContext *ctx, uint32_t insn)
{
    REQUIRE_INSNS_FLAGS2(ctx, ISA310);
    return opc1(insn) == 1;
}

static void ppc_tr_translate_insn(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    PowerPCCPU *cpu = POWERPC_CPU(cs);
    CPUPPCState *env = cs->env_ptr;
    target_ulong pc;
    uint32_t insn;
    bool ok;

    LOG_DISAS("----------------\n");
    LOG_DISAS("nip=" TARGET_FMT_lx " super=%d ir=%d\n",
              ctx->base.pc_next, ctx->mem_idx, (int)msr_ir);

    ctx->cia = pc = ctx->base.pc_next;
    insn = translator_ldl_swap(env, dcbase, pc, need_byteswap(ctx));
    ctx->base.pc_next = pc += 4;

    if (!is_prefix_insn(ctx, insn)) {
        ok = (decode_insn32(ctx, insn) ||
              decode_legacy(cpu, ctx, insn));
    } else if ((pc & 63) == 0) {
        /*
         * Power v3.1, section 1.9 Exceptions:
         * attempt to execute a prefixed instruction that crosses a
         * 64-byte address boundary (system alignment error).
         */
        gen_exception_err(ctx, POWERPC_EXCP_ALIGN, POWERPC_EXCP_ALIGN_INSN);
        ok = true;
    } else {
        uint32_t insn2 = translator_ldl_swap(env, dcbase, pc,
                                             need_byteswap(ctx));
        ctx->base.pc_next = pc += 4;
        ok = decode_insn64(ctx, deposit64(insn2, 32, 32, insn));
    }
    if (!ok) {
        gen_invalid(ctx);
    }

    /* End the TB when crossing a page boundary. */
    if (ctx->base.is_jmp == DISAS_NEXT && !(pc & ~TARGET_PAGE_MASK)) {
        ctx->base.is_jmp = DISAS_TOO_MANY;
    }

    translator_loop_temp_check(&ctx->base);
}
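
/*
 * Illustrative sketch, not used by the translator: the "(pc & 63) == 0"
 * test above runs after pc has already been advanced past the 4-byte
 * prefix, so it fires exactly when the prefix sat in the last word of a
 * 64-byte block and the suffix would start in the next block, the case
 * the quoted Power v3.1 rule turns into an alignment interrupt.  The
 * equivalent check on the un-advanced prefix address would be the helper
 * below; its name is made up for illustration.
 */
static inline bool prefix_crosses_64byte_boundary(target_ulong prefix_addr)
{
    /* The 4-byte suffix begins at prefix_addr + 4. */
    return (prefix_addr & 63) == 60;
}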

static void ppc_tr_tb_stop(DisasContextBase *dcbase, CPUState *cs)
{
    DisasContext *ctx = container_of(dcbase, DisasContext, base);
    DisasJumpType is_jmp = ctx->base.is_jmp;
    target_ulong nip = ctx->base.pc_next;

    if (is_jmp == DISAS_NORETURN) {
        /* We have already exited the TB. */
        return;
    }

    /* Honor single stepping. */
    if (unlikely(ctx->singlestep_enabled & CPU_SINGLE_STEP)
        && (nip <= 0x100 || nip > 0xf00)) {
        switch (is_jmp) {
        case DISAS_TOO_MANY:
        case DISAS_EXIT_UPDATE:
        case DISAS_CHAIN_UPDATE:
            gen_update_nip(ctx, nip);
            break;
        case DISAS_EXIT:
        case DISAS_CHAIN:
            break;
        default:
            g_assert_not_reached();
        }

        gen_debug_exception(ctx);
        return;
    }

    switch (is_jmp) {
    case DISAS_TOO_MANY:
        if (use_goto_tb(ctx, nip)) {
            pmu_count_insns(ctx);
            tcg_gen_goto_tb(0);
            gen_update_nip(ctx, nip);
            tcg_gen_exit_tb(ctx->base.tb, 0);
            break;
        }
        /* fall through */
    case DISAS_CHAIN_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_CHAIN:
        /*
         * tcg_gen_lookup_and_goto_ptr will exit the TB if
         * CF_NO_GOTO_PTR is set. Count insns now.
         */
        if (tb_cflags(ctx->base.tb) & CF_NO_GOTO_PTR) {
            pmu_count_insns(ctx);
        }

        tcg_gen_lookup_and_goto_ptr();
        break;

    case DISAS_EXIT_UPDATE:
        gen_update_nip(ctx, nip);
        /* fall through */
    case DISAS_EXIT:
        pmu_count_insns(ctx);
        tcg_gen_exit_tb(NULL, 0);
        break;

    default:
        g_assert_not_reached();
    }
}

static void ppc_tr_disas_log(const DisasContextBase *dcbase, CPUState *cs)
{
    qemu_log("IN: %s\n", lookup_symbol(dcbase->pc_first));
    log_target_disas(cs, dcbase->pc_first, dcbase->tb->size);
}

static const TranslatorOps ppc_tr_ops = {
    .init_disas_context = ppc_tr_init_disas_context,
    .tb_start           = ppc_tr_tb_start,
    .insn_start         = ppc_tr_insn_start,
    .translate_insn     = ppc_tr_translate_insn,
    .tb_stop            = ppc_tr_tb_stop,
    .disas_log          = ppc_tr_disas_log,
};

void gen_intermediate_code(CPUState *cs, TranslationBlock *tb, int max_insns)
{
    DisasContext ctx;

    translator_loop(&ppc_tr_ops, &ctx.base, cs, tb, max_insns);
}

void restore_state_to_opc(CPUPPCState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->nip = data[0];
}